diff --git a/src/endpoint/s3/ops/s3_get_object.js b/src/endpoint/s3/ops/s3_get_object.js index 6a183fa336..69c2391817 100644 --- a/src/endpoint/s3/ops/s3_get_object.js +++ b/src/endpoint/s3/ops/s3_get_object.js @@ -49,6 +49,7 @@ async function get_object(req, res) { } } http_utils.set_response_headers_from_request(req, res); + if (!version_id) await http_utils.set_expiration_header(req, res, object_md); // setting expiration header for bucket lifecycle const obj_size = object_md.size; const params = { object_md, diff --git a/src/endpoint/s3/ops/s3_head_object.js b/src/endpoint/s3/ops/s3_head_object.js index 1cd3543c3a..c34db17d6a 100644 --- a/src/endpoint/s3/ops/s3_head_object.js +++ b/src/endpoint/s3/ops/s3_head_object.js @@ -29,6 +29,7 @@ async function head_object(req, res) { s3_utils.set_response_object_md(res, object_md); s3_utils.set_encryption_response_headers(req, res, object_md.encryption); http_utils.set_response_headers_from_request(req, res); + if (!params.version_id) await http_utils.set_expiration_header(req, res, object_md); // setting expiration header for bucket lifecycle } module.exports = { diff --git a/src/endpoint/s3/ops/s3_put_object.js b/src/endpoint/s3/ops/s3_put_object.js index d8bbdddd1e..17389e05c7 100644 --- a/src/endpoint/s3/ops/s3_put_object.js +++ b/src/endpoint/s3/ops/s3_put_object.js @@ -81,6 +81,14 @@ async function put_object(req, res) { } res.setHeader('ETag', `"${reply.etag}"`); + const object_info = { + key: req.params.key, + create_time: new Date().getTime(), + size: size, + tagging: tagging, + }; + await http_utils.set_expiration_header(req, res, object_info); // setting expiration header for bucket lifecycle + if (reply.seq) { res.seq = reply.seq; delete reply.seq; diff --git a/src/manage_nsfs/nc_lifecycle.js b/src/manage_nsfs/nc_lifecycle.js index f679aab3c3..83a1ccc0cb 100644 --- a/src/manage_nsfs/nc_lifecycle.js +++ b/src/manage_nsfs/nc_lifecycle.js @@ -271,7 +271,7 @@ class NCLifecycle { if 
(candidates.delete_candidates?.length > 0) { const expiration = lifecycle_rule.expiration ? this._get_expiration_time(lifecycle_rule.expiration) : 0; - const filter_func = this._build_lifecycle_filter({filter: lifecycle_rule.filter, expiration}); + const filter_func = lifecycle_utils.build_lifecycle_filter({filter: lifecycle_rule.filter, expiration}); dbg.log0('process_rule: calling delete_multiple_objects, num of objects to be deleted', candidates.delete_candidates.length); const delete_res = await this._call_op_and_update_status({ bucket_name, @@ -478,7 +478,7 @@ class NCLifecycle { if (rule_state.is_finished) return []; const expiration = this._get_expiration_time(lifecycle_rule.expiration); if (expiration < 0) return []; - const filter_func = this._build_lifecycle_filter({filter: lifecycle_rule.filter, expiration}); + const filter_func = lifecycle_utils.build_lifecycle_filter({filter: lifecycle_rule.filter, expiration}); const filtered_objects = []; // TODO list_objects does not accept a filter and works in batch sizes of 1000. should handle batching @@ -537,7 +537,7 @@ class NCLifecycle { const versions_list = params.versions_list; const candidates = []; const expiration = lifecycle_rule.expiration?.days ? 
this._get_expiration_time(lifecycle_rule.expiration) : 0; - const filter_func = this._build_lifecycle_filter({filter: lifecycle_rule.filter, expiration}); + const filter_func = lifecycle_utils.build_lifecycle_filter({filter: lifecycle_rule.filter, expiration}); for (let i = 0; i < versions_list.objects.length - 1; i++) { if (this.filter_expired_delete_marker(versions_list.objects[i], versions_list.objects[i + 1], filter_func)) { candidates.push(versions_list.objects[i]); @@ -640,7 +640,7 @@ class NCLifecycle { } const versions_list = params.versions_list; - const filter_func = this._build_lifecycle_filter({filter: lifecycle_rule.filter, expiration: 0}); + const filter_func = lifecycle_utils.build_lifecycle_filter({filter: lifecycle_rule.filter, expiration: 0}); const num_newer_versions = lifecycle_rule.noncurrent_version_expiration.newer_noncurrent_versions; const num_non_current_days = lifecycle_rule.noncurrent_version_expiration.noncurrent_days; const delete_candidates = []; @@ -674,7 +674,7 @@ class NCLifecycle { const expiration = lifecycle_rule.abort_incomplete_multipart_upload.days_after_initiation; const res = []; - const filter_func = this._build_lifecycle_filter({filter, expiration}); + const filter_func = lifecycle_utils.build_lifecycle_filter({filter, expiration}); let dir_handle; //TODO this is almost identical to list_uploads except for error handling and support for pagination. 
should modify list-upload and use it in here instead try { @@ -720,29 +720,6 @@ class NCLifecycle { ///////// FILTER HELPERS //////// //////////////////////////////////// - /** - * @typedef {{ - * filter: Object - * expiration: Number - * }} filter_params - * - * @param {filter_params} params - * @returns - */ - _build_lifecycle_filter(params) { - /** - * @param {Object} object_info - */ - return function(object_info) { - if (params.filter?.prefix && !object_info.key.startsWith(params.filter.prefix)) return false; - if (params.expiration && object_info.age < params.expiration) return false; - if (params.filter?.tags && !_file_contain_tags(object_info, params.filter.tags)) return false; - if (params.filter?.object_size_greater_than && object_info.size < params.filter.object_size_greater_than) return false; - if (params.filter?.object_size_less_than && object_info.size > params.filter.object_size_less_than) return false; - return true; - }; - } - /** * get the expiration time in days of an object * if rule is set with date, then rule is applied for all objects after that date @@ -1468,38 +1445,6 @@ class NCLifecycle { } } -////////////////// -// TAGS HELPERS // -////////////////// - -/** - * checks if tag query_tag is in the list tag_set - * @param {Object} query_tag - * @param {Array} tag_set - */ -function _list_contain_tag(query_tag, tag_set) { - for (const t of tag_set) { - if (t.key === query_tag.key && t.value === query_tag.value) return true; - } - return false; -} - -/** - * checks if object has all the tags in filter_tags - * @param {Object} object_info - * @param {Array} filter_tags - * @returns - */ -function _file_contain_tags(object_info, filter_tags) { - if (object_info.tags === undefined) return false; - for (const tag of filter_tags) { - if (!_list_contain_tag(tag, object_info.tags)) { - return false; - } - } - return true; -} - // EXPORTS exports.NCLifecycle = NCLifecycle; exports.ILM_POLICIES_TMP_DIR = ILM_POLICIES_TMP_DIR; diff --git 
a/src/test/unit_tests/jest_tests/test_nc_lifecycle.test.js b/src/test/unit_tests/jest_tests/test_nc_lifecycle.test.js index cc2c5ce4d2..a61c36e77f 100644 --- a/src/test/unit_tests/jest_tests/test_nc_lifecycle.test.js +++ b/src/test/unit_tests/jest_tests/test_nc_lifecycle.test.js @@ -9,10 +9,9 @@ const path = require('path'); const crypto = require('crypto'); const config = require('../../../../config'); const fs_utils = require('../../../util/fs_utils'); -const { ConfigFS } = require('../../../sdk/config_fs'); const NamespaceFS = require('../../../sdk/namespace_fs'); const buffer_utils = require('../../../util/buffer_utils'); -const { NCLifecycle } = require('../../../manage_nsfs/nc_lifecycle'); +const lifecycle_utils = require('../../../../src/util/lifecycle_utils'); const endpoint_stats_collector = require('../../../sdk/endpoint_stats_collector'); const { TMP_PATH, set_nc_config_dir_in_config, TEST_TIMEOUT } = require('../../system_tests/test_utils'); @@ -21,9 +20,7 @@ const config_root = path.join(TMP_PATH, 'config_root_nc_lifecycle'); const root_path = path.join(TMP_PATH, 'root_path_nc_lifecycle/'); const bucket_name = 'lifecycle_bucket'; const bucket_path = path.join(root_path, bucket_name); -const config_fs = new ConfigFS(config_root); const dummy_object_sdk = make_dummy_object_sdk(); -const nc_lifecycle = new NCLifecycle(config_fs); const key = 'obj1.txt'; const data = crypto.randomBytes(100); @@ -90,7 +87,7 @@ describe('delete_multiple_objects + filter', () => { it('delete_multiple_objects - filter should fail on wrong prefix - versioning DISABLED bucket', async () => { const data_buffer = buffer_utils.buffer_to_read_stream(data); await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { prefix: 'd' }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { prefix: 'd' }, expiration: 0 }); const 
delete_res = await nsfs.delete_multiple_objects({ objects: [{ key }], filter_func }, dummy_object_sdk); await assert_object_deletion_failed(delete_res); }); @@ -98,7 +95,7 @@ describe('delete_multiple_objects + filter', () => { it('delete_multiple_objects - filter should fail on wrong object_size_less_than - versioning DISABLED bucket', async () => { const data_buffer = buffer_utils.buffer_to_read_stream(data); await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { object_size_less_than: 99 }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { object_size_less_than: 99 }, expiration: 0 }); const delete_res = await nsfs.delete_multiple_objects({ objects: [{ key }], filter_func }, dummy_object_sdk); await assert_object_deletion_failed(delete_res); }); @@ -106,7 +103,7 @@ describe('delete_multiple_objects + filter', () => { it('delete_multiple_objects - filter should fail on wrong object_size_greater_than - versioning DISABLED bucket', async () => { const data_buffer = buffer_utils.buffer_to_read_stream(data); await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { object_size_greater_than: 101 }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { object_size_greater_than: 101 }, expiration: 0 }); const delete_res = await nsfs.delete_multiple_objects({ objects: [{ key }], filter_func }, dummy_object_sdk); await assert_object_deletion_failed(delete_res); }); @@ -114,7 +111,7 @@ describe('delete_multiple_objects + filter', () => { it('delete_multiple_objects - filter should fail on wrong tags - versioning DISABLED bucket', async () => { const data_buffer = buffer_utils.buffer_to_read_stream(data); await nsfs.upload_object({ bucket: bucket_name, key: 
key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { tags: [{ key: 'a', value: 'b'}] }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { tags: [{ key: 'a', value: 'b'}] }, expiration: 0 }); const delete_res = await nsfs.delete_multiple_objects({ objects: [{ key }], filter_func }, dummy_object_sdk); await assert_object_deletion_failed(delete_res); }); @@ -122,7 +119,7 @@ describe('delete_multiple_objects + filter', () => { it('delete_multiple_objects - filter should fail on expiration - versioning DISABLED bucket', async () => { const data_buffer = buffer_utils.buffer_to_read_stream(data); await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ expiration: 5 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ expiration: 5 }); const delete_res = await nsfs.delete_multiple_objects({ objects: [{ key }], filter_func }, dummy_object_sdk); await assert_object_deletion_failed(delete_res); }); @@ -133,7 +130,7 @@ describe('delete_multiple_objects + filter', () => { it('delete_multiple_objects - filter should pass on wrong prefix - versioning DISABLED bucket', async () => { const data_buffer = buffer_utils.buffer_to_read_stream(data); await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { prefix: 'ob' }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { prefix: 'ob' }, expiration: 0 }); const delete_res = await nsfs.delete_multiple_objects({ objects: [{ key }], filter_func }, dummy_object_sdk); await assert_object_deleted(delete_res); }); @@ -141,7 +138,7 @@ describe('delete_multiple_objects + filter', () => { it('delete_multiple_objects - filter should pass on wrong 
object_size_less_than - versioning DISABLED bucket', async () => { const data_buffer = buffer_utils.buffer_to_read_stream(data); await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { object_size_less_than: 101 }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { object_size_less_than: 101 }, expiration: 0 }); const delete_res = await nsfs.delete_multiple_objects({ objects: [{ key }], filter_func }, dummy_object_sdk); await assert_object_deleted(delete_res); }); @@ -149,7 +146,7 @@ describe('delete_multiple_objects + filter', () => { it('delete_multiple_objects - filter should pass on wrong object_size_greater_than - versioning DISABLED bucket', async () => { const data_buffer = buffer_utils.buffer_to_read_stream(data); await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { object_size_greater_than: 1 }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { object_size_greater_than: 1 }, expiration: 0 }); const delete_res = await nsfs.delete_multiple_objects({ objects: [{ key }], filter_func }, dummy_object_sdk); await assert_object_deleted(delete_res); }); @@ -158,7 +155,7 @@ describe('delete_multiple_objects + filter', () => { const data_buffer = buffer_utils.buffer_to_read_stream(data); const tagging = [{ key: 'a', value: 'b' }]; await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer, tagging }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { tags: tagging }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { tags: tagging }, expiration: 0 }); const delete_res = await nsfs.delete_multiple_objects({ objects: [{ key }], filter_func }, 
dummy_object_sdk); await assert_object_deleted(delete_res); }); @@ -166,7 +163,7 @@ describe('delete_multiple_objects + filter', () => { it('delete_multiple_objects - filter should pass on expiration - versioning DISABLED bucket', async () => { const data_buffer = buffer_utils.buffer_to_read_stream(data); await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ expiration: 0 }); const delete_res = await nsfs.delete_multiple_objects({ objects: [{ key }], filter_func }, dummy_object_sdk); await assert_object_deleted(delete_res); }); @@ -178,7 +175,7 @@ describe('delete_multiple_objects + filter', () => { nsfs.versioning = 'ENABLED'; const data_buffer = buffer_utils.buffer_to_read_stream(data); await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { prefix: 'd' }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { prefix: 'd' }, expiration: 0 }); const delete_res = await nsfs.delete_multiple_objects({ objects: [{ key }], filter_func }, dummy_object_sdk); await assert_object_deletion_failed(delete_res); }); @@ -187,7 +184,7 @@ describe('delete_multiple_objects + filter', () => { nsfs.versioning = 'ENABLED'; const data_buffer = buffer_utils.buffer_to_read_stream(data); await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { object_size_less_than: 99 }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { object_size_less_than: 99 }, expiration: 0 }); const delete_res = await nsfs.delete_multiple_objects({ objects: [{ key }], filter_func }, dummy_object_sdk); await 
assert_object_deletion_failed(delete_res); }); @@ -196,7 +193,7 @@ describe('delete_multiple_objects + filter', () => { nsfs.versioning = 'ENABLED'; const data_buffer = buffer_utils.buffer_to_read_stream(data); await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { object_size_greater_than: 101 }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { object_size_greater_than: 101 }, expiration: 0 }); const delete_res = await nsfs.delete_multiple_objects({ objects: [{ key }], filter_func }, dummy_object_sdk); await assert_object_deletion_failed(delete_res); }); @@ -205,7 +202,7 @@ describe('delete_multiple_objects + filter', () => { nsfs.versioning = 'ENABLED'; const data_buffer = buffer_utils.buffer_to_read_stream(data); await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { tags: [{ key: 'a', value: 'b'}] }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { tags: [{ key: 'a', value: 'b'}] }, expiration: 0 }); const delete_res = await nsfs.delete_multiple_objects({ objects: [{ key }], filter_func }, dummy_object_sdk); await assert_object_deletion_failed(delete_res); }); @@ -214,7 +211,7 @@ describe('delete_multiple_objects + filter', () => { nsfs.versioning = 'ENABLED'; const data_buffer = buffer_utils.buffer_to_read_stream(data); await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ expiration: 5 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ expiration: 5 }); const delete_res = await nsfs.delete_multiple_objects({ objects: [{ key }], filter_func }, dummy_object_sdk); await 
assert_object_deletion_failed(delete_res); }); @@ -226,7 +223,7 @@ describe('delete_multiple_objects + filter', () => { nsfs.versioning = 'ENABLED'; const data_buffer = buffer_utils.buffer_to_read_stream(data); await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { prefix: 'ob' }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { prefix: 'ob' }, expiration: 0 }); const delete_res = await nsfs.delete_multiple_objects({ objects: [{ key }], filter_func }, dummy_object_sdk); await assert_object_deleted(delete_res, { should_create_a_delete_marker: true }); }); @@ -235,7 +232,7 @@ describe('delete_multiple_objects + filter', () => { nsfs.versioning = 'ENABLED'; const data_buffer = buffer_utils.buffer_to_read_stream(data); await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { object_size_less_than: 101 }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { object_size_less_than: 101 }, expiration: 0 }); const delete_res = await nsfs.delete_multiple_objects({ objects: [{ key }], filter_func }, dummy_object_sdk); await assert_object_deleted(delete_res, { should_create_a_delete_marker: true }); }); @@ -244,7 +241,7 @@ describe('delete_multiple_objects + filter', () => { nsfs.versioning = 'ENABLED'; const data_buffer = buffer_utils.buffer_to_read_stream(data); await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { object_size_greater_than: 1 }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { object_size_greater_than: 1 }, expiration: 0 }); const delete_res = await 
nsfs.delete_multiple_objects({ objects: [{ key }], filter_func }, dummy_object_sdk); await assert_object_deleted(delete_res, { should_create_a_delete_marker: true }); }); @@ -254,7 +251,7 @@ describe('delete_multiple_objects + filter', () => { const data_buffer = buffer_utils.buffer_to_read_stream(data); const tagging = [{ key: 'a', value: 'b' }]; await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer, tagging }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { tags: tagging }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { tags: tagging }, expiration: 0 }); const delete_res = await nsfs.delete_multiple_objects({ objects: [{ key }], filter_func }, dummy_object_sdk); await assert_object_deleted(delete_res, { should_create_a_delete_marker: true }); }); @@ -263,7 +260,7 @@ describe('delete_multiple_objects + filter', () => { nsfs.versioning = 'ENABLED'; const data_buffer = buffer_utils.buffer_to_read_stream(data); await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ expiration: 0 }); const delete_res = await nsfs.delete_multiple_objects({ objects: [{ key }], filter_func }, dummy_object_sdk); await assert_object_deleted(delete_res, { should_create_a_delete_marker: true }); }); @@ -275,7 +272,7 @@ describe('delete_multiple_objects + filter', () => { nsfs.versioning = 'ENABLED'; const data_buffer = buffer_utils.buffer_to_read_stream(data); const upload_res = await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { prefix: 'd' }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { prefix: 'd' }, expiration: 0 
}); const objects_to_delete = [{ key, version_id: upload_res.version_id }]; const delete_res = await nsfs.delete_multiple_objects({ objects: objects_to_delete, filter_func }, dummy_object_sdk); await assert_object_deletion_failed(delete_res); @@ -285,7 +282,7 @@ describe('delete_multiple_objects + filter', () => { nsfs.versioning = 'ENABLED'; const data_buffer = buffer_utils.buffer_to_read_stream(data); const upload_res = await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { object_size_less_than: 99 }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { object_size_less_than: 99 }, expiration: 0 }); const objects_to_delete = [{ key, version_id: upload_res.version_id }]; const delete_res = await nsfs.delete_multiple_objects({ objects: objects_to_delete, filter_func }, dummy_object_sdk); await assert_object_deletion_failed(delete_res); @@ -295,7 +292,7 @@ describe('delete_multiple_objects + filter', () => { nsfs.versioning = 'ENABLED'; const data_buffer = buffer_utils.buffer_to_read_stream(data); const upload_res = await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { object_size_greater_than: 101 }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { object_size_greater_than: 101 }, expiration: 0 }); const objects_to_delete = [{ key, version_id: upload_res.version_id }]; const delete_res = await nsfs.delete_multiple_objects({ objects: objects_to_delete, filter_func }, dummy_object_sdk); await assert_object_deletion_failed(delete_res); @@ -305,7 +302,7 @@ describe('delete_multiple_objects + filter', () => { nsfs.versioning = 'ENABLED'; const data_buffer = buffer_utils.buffer_to_read_stream(data); const upload_res = await nsfs.upload_object({ 
bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { tags: [{ key: 'a', value: 'b'}] }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { tags: [{ key: 'a', value: 'b'}] }, expiration: 0 }); const objects_to_delete = [{ key, version_id: upload_res.version_id }]; const delete_res = await nsfs.delete_multiple_objects({ objects: objects_to_delete, filter_func }, dummy_object_sdk); await assert_object_deletion_failed(delete_res); @@ -315,7 +312,7 @@ describe('delete_multiple_objects + filter', () => { nsfs.versioning = 'ENABLED'; const data_buffer = buffer_utils.buffer_to_read_stream(data); const upload_res = await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ expiration: 5 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ expiration: 5 }); const objects_to_delete = [{ key, version_id: upload_res.version_id }]; const delete_res = await nsfs.delete_multiple_objects({ objects: objects_to_delete, filter_func }, dummy_object_sdk); await assert_object_deletion_failed(delete_res); @@ -329,7 +326,7 @@ describe('delete_multiple_objects + filter', () => { nsfs.versioning = 'ENABLED'; const data_buffer = buffer_utils.buffer_to_read_stream(data); const upload_res = await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { prefix: 'ob' }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { prefix: 'ob' }, expiration: 0 }); const objects_to_delete = [{ key, version_id: upload_res.version_id }]; const delete_res = await nsfs.delete_multiple_objects({ objects: objects_to_delete, filter_func }, dummy_object_sdk); await assert_object_deleted(delete_res); @@ 
-339,7 +336,7 @@ describe('delete_multiple_objects + filter', () => { nsfs.versioning = 'ENABLED'; const data_buffer = buffer_utils.buffer_to_read_stream(data); const upload_res = await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { object_size_less_than: 101 }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { object_size_less_than: 101 }, expiration: 0 }); const objects_to_delete = [{ key, version_id: upload_res.version_id }]; const delete_res = await nsfs.delete_multiple_objects({ objects: objects_to_delete, filter_func }, dummy_object_sdk); await assert_object_deleted(delete_res); @@ -349,7 +346,7 @@ describe('delete_multiple_objects + filter', () => { nsfs.versioning = 'ENABLED'; const data_buffer = buffer_utils.buffer_to_read_stream(data); const upload_res = await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { object_size_greater_than: 1 }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { object_size_greater_than: 1 }, expiration: 0 }); const objects_to_delete = [{ key, version_id: upload_res.version_id }]; const delete_res = await nsfs.delete_multiple_objects({ objects: objects_to_delete, filter_func }, dummy_object_sdk); await assert_object_deleted(delete_res); @@ -361,7 +358,7 @@ describe('delete_multiple_objects + filter', () => { const tagging = [{ key: 'a', value: 'b' }]; const upload_res = await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer, tagging }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { tags: tagging }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { tags: tagging }, expiration: 0 }); const 
objects_to_delete = [{ key, version_id: upload_res.version_id }]; const delete_res = await nsfs.delete_multiple_objects({ objects: objects_to_delete, filter_func }, dummy_object_sdk); await assert_object_deleted(delete_res); @@ -371,7 +368,7 @@ describe('delete_multiple_objects + filter', () => { nsfs.versioning = 'ENABLED'; const data_buffer = buffer_utils.buffer_to_read_stream(data); const upload_res = await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ expiration: 0 }); const objects_to_delete = [{ key, version_id: upload_res.version_id }]; const delete_res = await nsfs.delete_multiple_objects({ objects: objects_to_delete, filter_func }, dummy_object_sdk); await assert_object_deleted(delete_res); @@ -386,7 +383,7 @@ describe('delete_multiple_objects + filter', () => { const data_buffer2 = buffer_utils.buffer_to_read_stream(data); const upload_res1 = await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer1 }, dummy_object_sdk); await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer2 }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { prefix: 'd' }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { prefix: 'd' }, expiration: 0 }); const objects_to_delete = [{ key, version_id: upload_res1.version_id }]; const delete_res = await nsfs.delete_multiple_objects({ objects: objects_to_delete, filter_func }, dummy_object_sdk); await assert_object_deletion_failed(delete_res); @@ -398,7 +395,7 @@ describe('delete_multiple_objects + filter', () => { const data_buffer2 = buffer_utils.buffer_to_read_stream(data); const upload_res1 = await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer1 }, dummy_object_sdk); await 
nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer2 }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { object_size_less_than: 99 }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { object_size_less_than: 99 }, expiration: 0 }); const objects_to_delete = [{ key, version_id: upload_res1.version_id }]; const delete_res = await nsfs.delete_multiple_objects({ objects: objects_to_delete, filter_func }, dummy_object_sdk); await assert_object_deletion_failed(delete_res); @@ -410,7 +407,7 @@ describe('delete_multiple_objects + filter', () => { const data_buffer2 = buffer_utils.buffer_to_read_stream(data); const upload_res1 = await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer1 }, dummy_object_sdk); await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer2 }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { object_size_greater_than: 101 }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { object_size_greater_than: 101 }, expiration: 0 }); const objects_to_delete = [{ key, version_id: upload_res1.version_id }]; const delete_res = await nsfs.delete_multiple_objects({ objects: objects_to_delete, filter_func }, dummy_object_sdk); await assert_object_deletion_failed(delete_res); @@ -422,7 +419,7 @@ describe('delete_multiple_objects + filter', () => { const data_buffer2 = buffer_utils.buffer_to_read_stream(data); const upload_res1 = await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer1 }, dummy_object_sdk); await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer2 }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { tags: [{ key: 'a', value: 'b'}] }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ 
filter: { tags: [{ key: 'a', value: 'b'}] }, expiration: 0 }); const objects_to_delete = [{ key, version_id: upload_res1.version_id }]; const delete_res = await nsfs.delete_multiple_objects({ objects: objects_to_delete, filter_func }, dummy_object_sdk); await assert_object_deletion_failed(delete_res); @@ -434,7 +431,7 @@ describe('delete_multiple_objects + filter', () => { const data_buffer2 = buffer_utils.buffer_to_read_stream(data); const upload_res1 = await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer1 }, dummy_object_sdk); await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer2 }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ expiration: 5 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ expiration: 5 }); const objects_to_delete = [{ key, version_id: upload_res1.version_id }]; const delete_res = await nsfs.delete_multiple_objects({ objects: objects_to_delete, filter_func }, dummy_object_sdk); await assert_object_deletion_failed(delete_res); @@ -449,7 +446,7 @@ describe('delete_multiple_objects + filter', () => { const data_buffer2 = buffer_utils.buffer_to_read_stream(data); const upload_res1 = await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer1 }, dummy_object_sdk); const upload_res2 = await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer2 }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { prefix: 'ob' }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { prefix: 'ob' }, expiration: 0 }); const objects_to_delete = [{ key, version_id: upload_res1.version_id }]; const delete_res = await nsfs.delete_multiple_objects({ objects: objects_to_delete, filter_func }, dummy_object_sdk); await assert_object_deleted(delete_res, { new_latest_version: upload_res2.version_id}); @@ -461,7 +458,7 @@ 
describe('delete_multiple_objects + filter', () => { const data_buffer2 = buffer_utils.buffer_to_read_stream(data); const upload_res1 = await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer1 }, dummy_object_sdk); const upload_res2 = await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer2 }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { object_size_less_than: 101 }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { object_size_less_than: 101 }, expiration: 0 }); const objects_to_delete = [{ key, version_id: upload_res1.version_id }]; const delete_res = await nsfs.delete_multiple_objects({ objects: objects_to_delete, filter_func }, dummy_object_sdk); await assert_object_deleted(delete_res, { new_latest_version: upload_res2.version_id}); @@ -473,7 +470,7 @@ describe('delete_multiple_objects + filter', () => { const data_buffer2 = buffer_utils.buffer_to_read_stream(data); const upload_res1 = await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer1 }, dummy_object_sdk); const upload_res2 = await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer2 }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { object_size_greater_than: 1 }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { object_size_greater_than: 1 }, expiration: 0 }); const objects_to_delete = [{ key, version_id: upload_res1.version_id }]; const delete_res = await nsfs.delete_multiple_objects({ objects: objects_to_delete, filter_func }, dummy_object_sdk); await assert_object_deleted(delete_res, { new_latest_version: upload_res2.version_id}); @@ -486,7 +483,7 @@ describe('delete_multiple_objects + filter', () => { const data_buffer2 = buffer_utils.buffer_to_read_stream(data); const upload_res1 = await nsfs.upload_object({ bucket: 
bucket_name, key: key, source_stream: data_buffer1 }, dummy_object_sdk); const upload_res2 = await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer2 }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { tags: tagging }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { tags: tagging }, expiration: 0 }); const objects_to_delete = [{ key, version_id: upload_res1.version_id }]; const delete_res = await nsfs.delete_multiple_objects({ objects: objects_to_delete, filter_func }, dummy_object_sdk); await assert_object_deleted(delete_res, { new_latest_version: upload_res2.version_id}); @@ -498,7 +495,7 @@ describe('delete_multiple_objects + filter', () => { const data_buffer2 = buffer_utils.buffer_to_read_stream(data); const upload_res1 = await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer1 }, dummy_object_sdk); const upload_res2 = await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer2 }, dummy_object_sdk); - const filter_func = nc_lifecycle._build_lifecycle_filter({ expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ expiration: 0 }); const objects_to_delete = [{ key, version_id: upload_res1.version_id }]; const delete_res = await nsfs.delete_multiple_objects({ objects: objects_to_delete, filter_func }, dummy_object_sdk); await assert_object_deleted(delete_res, { new_latest_version: upload_res2.version_id}); @@ -513,7 +510,7 @@ describe('delete_multiple_objects + filter', () => { await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer1 }, dummy_object_sdk); const delete_res1 = await nsfs.delete_object({ bucket: bucket_name, key: key }, dummy_object_sdk); expect(delete_res1.created_delete_marker).toBe(true); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { prefix: 'd' }, expiration: 0 }); + const filter_func = 
lifecycle_utils.build_lifecycle_filter({ filter: { prefix: 'd' }, expiration: 0 }); const objects_to_delete = [{ key, version_id: delete_res1.created_version_id }]; const delete_res = await nsfs.delete_multiple_objects({ objects: objects_to_delete, filter_func }, dummy_object_sdk); await assert_object_deletion_failed(delete_res, { latest_delete_marker: true }); @@ -525,7 +522,7 @@ describe('delete_multiple_objects + filter', () => { await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer1 }, dummy_object_sdk); const delete_res1 = await nsfs.delete_object({ bucket: bucket_name, key: key }, dummy_object_sdk); expect(delete_res1.created_delete_marker).toBe(true); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { object_size_greater_than: 101 }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { object_size_greater_than: 101 }, expiration: 0 }); const objects_to_delete = [{ key, version_id: delete_res1.created_version_id }]; const delete_res = await nsfs.delete_multiple_objects({ objects: objects_to_delete, filter_func }, dummy_object_sdk); await assert_object_deletion_failed(delete_res, { latest_delete_marker: true }); @@ -537,7 +534,7 @@ describe('delete_multiple_objects + filter', () => { await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer1 }, dummy_object_sdk); const delete_res1 = await nsfs.delete_object({ bucket: bucket_name, key: key }, dummy_object_sdk); expect(delete_res1.created_delete_marker).toBe(true); - const filter_func = nc_lifecycle._build_lifecycle_filter({ filter: { tags: [{ key: 'a', value: 'b' }] }, expiration: 0 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ filter: { tags: [{ key: 'a', value: 'b' }] }, expiration: 0 }); const objects_to_delete = [{ key, version_id: delete_res1.created_version_id }]; const delete_res = await nsfs.delete_multiple_objects({ objects: objects_to_delete, filter_func }, 
dummy_object_sdk); await assert_object_deletion_failed(delete_res, { latest_delete_marker: true }); @@ -549,7 +546,7 @@ describe('delete_multiple_objects + filter', () => { await nsfs.upload_object({ bucket: bucket_name, key: key, source_stream: data_buffer1 }, dummy_object_sdk); const delete_res1 = await nsfs.delete_object({ bucket: bucket_name, key: key }, dummy_object_sdk); expect(delete_res1.created_delete_marker).toBe(true); - const filter_func = nc_lifecycle._build_lifecycle_filter({ expiration: 5 }); + const filter_func = lifecycle_utils.build_lifecycle_filter({ expiration: 5 }); const objects_to_delete = [{ key, version_id: delete_res1.created_version_id }]; const delete_res2 = await nsfs.delete_multiple_objects({ objects: objects_to_delete, filter_func }, dummy_object_sdk); await assert_object_deletion_failed(delete_res2, { latest_delete_marker: true }); diff --git a/src/test/unit_tests/test_lifecycle.js b/src/test/unit_tests/test_lifecycle.js index 512e790fef..3a5127480e 100644 --- a/src/test/unit_tests/test_lifecycle.js +++ b/src/test/unit_tests/test_lifecycle.js @@ -799,6 +799,125 @@ mocha.describe('lifecycle', () => { } }); + mocha.describe('bucket-lifecycle-expiration-header', function() { + const bucket = Bucket; + + const run_expiration_test = async ({ rules, expected_id, expected_days, key, tagging = undefined, size = 1000}) => { + const putLifecycleParams = { + Bucket: bucket, + LifecycleConfiguration: { Rules: rules } + }; + await s3.putBucketLifecycleConfiguration(putLifecycleParams); + + const putObjectParams = { + Bucket: bucket, + Key: key, + Body: 'x'.repeat(size) // default 1KB if size not specified + }; + if (tagging) { + putObjectParams.Tagging = tagging; + } + const start_time = new Date(); + let res = await s3.putObject(putObjectParams); + assert.ok(res.Expiration, 'expiration header missing in putObject response'); + + res = await s3.headObject({ Bucket: bucket, Key: key }); + assert.ok(res.Expiration, 'expiration header missing in 
headObject response'); + + const valid = validate_expiration_header(res.Expiration, start_time, expected_id, expected_days); + assert.ok(valid, `expected rule ${expected_id} to match`); + }; + + function generate_rule(id, prefix, tags, size_gt, size_lt, expiration_days) { + const filters = {}; + if (prefix) filters.Prefix = prefix; + if (Array.isArray(tags) && tags.length) filters.Tags = tags; + if (size_gt !== undefined) filters.ObjectSizeGreaterThan = size_gt; + if (size_lt !== undefined) filters.ObjectSizeLessThan = size_lt; + + const filter = Object.keys(filters).length > 1 ? { And: filters } : filters; + + return { + ID: id, + Status: 'Enabled', + Filter: filter, + Expiration: { Days: expiration_days }, + }; + } + + function validate_expiration_header(expiration_header, start_time, expected_rule_id, delta_days) { + const match = expiration_header.match(/expiry-date="(.+)", rule-id="(.+)"/); + if (!match) return false; + console.log("match: ", match); + + const [, expiry_str, rule_id] = match; + const expiration = new Date(expiry_str); + const start = new Date(start_time); + start.setUTCHours(0, 0, 0, 0); // adjusting to midnight UTC otherwise the tests will fail - fix for ceph-s3 tests + + const days_diff = Math.floor((expiration.getTime() - start.getTime()) / (24 * 60 * 60 * 1000)); + + return days_diff === delta_days && rule_id === expected_rule_id; + } + + mocha.it('should select rule with longest prefix', async () => { + const rules = [ + generate_rule('short-prefix', 'test1/', [], undefined, undefined, 10), + generate_rule('long-prefix', 'test1/logs/', [], undefined, undefined, 17), + ]; + await run_expiration_test({ + rules, + key: 'test1/logs//file.txt', + expected_id: 'long-prefix', + expected_days: 17 + }); + }); + + mocha.it('should select rule with more tags when prefix is same', async () => { + const rules = [ + generate_rule('one-tag', 'test2/', [{ Key: 'env', Value: 'prod' }], undefined, undefined, 5), + generate_rule('two-tags', 'test2/', [ + { 
Key: 'env', Value: 'prod' }, + { Key: 'team', Value: 'backend' } + ], undefined, undefined, 9), + ]; + await run_expiration_test({ + rules, + key: 'test2/file2.txt', + tagging: 'env=prod&team=backend', + expected_id: 'two-tags', + expected_days: 9 + }); + }); + + mocha.it('should select rule with narrower size span when prefix and tags are matching', async () => { + const rules = [ + generate_rule('wide-range', 'test3/', [], 100, 10000, 4), + generate_rule('narrow-range', 'test3/', [], 1000, 5000, 6), + ]; + await run_expiration_test({ + rules, + key: 'test3/file3.txt', + size: 1500, + expected_id: 'narrow-range', + expected_days: 6 + }); + }); + + mocha.it('should fallback to first matching rule if all filters are equal', async () => { + const rules = [ + generate_rule('rule-a', 'test4/', [], 0, 10000, 7), + generate_rule('rule-b', 'test4/', [], 0, 10000, 11), + ]; + await run_expiration_test({ + rules, + key: 'test4/file4.txt', + expected_id: 'rule-a', + expected_days: 7 + }); + }); + }); + function readable_buffer(data, split = 1, finish = 'end') { const max = Math.ceil(data.length / split); let pos = 0; diff --git a/src/util/http_utils.js b/src/util/http_utils.js index 387db25a5a..670f4783da 100644 --- a/src/util/http_utils.js +++ b/src/util/http_utils.js @@ -23,6 +23,7 @@ const net_utils = require('./net_utils'); const time_utils = require('./time_utils'); const cloud_utils = require('./cloud_utils'); const ssl_utils = require('../util/ssl_utils'); +const lifecycle_utils = require('../../src/util/lifecycle_utils'); const RpcError = require('../rpc/rpc_error'); const S3Error = require('../endpoint/s3/s3_errors').S3Error; @@ -664,6 +665,27 @@ function set_amz_headers(req, res) { res.setHeader('x-amz-id-2', req.request_id); } +/** + * set_expiration_header sets the `x-amz-expiration` response header for GET, PUT, or HEAD object requests + * if the object matches any enabled bucket lifecycle rule + * + * @param {Object} req + * @param {http.ServerResponse} res + * 
@param {Object} object_info + */ +async function set_expiration_header(req, res, object_info) { + const rules = req.params.bucket && await req.object_sdk.get_bucket_lifecycle_configuration_rules({ name: req.params.bucket }); + + const matched_rule = lifecycle_utils.get_lifecycle_rule_for_object(rules, object_info); + if (matched_rule) { + const expiration_header = lifecycle_utils.build_expiration_header(matched_rule, object_info.create_time); + if (expiration_header) { + dbg.log1('set x_amz_expiration header from applied rule: ', matched_rule); + res.setHeader('x-amz-expiration', expiration_header); + } + } +} + /** * @typedef {{ * allow_origin: string; @@ -945,6 +967,7 @@ exports.set_keep_alive_whitespace_interval = set_keep_alive_whitespace_interval; exports.parse_xml_to_js = parse_xml_to_js; exports.check_headers = check_headers; exports.set_amz_headers = set_amz_headers; +exports.set_expiration_header = set_expiration_header; exports.set_cors_headers = set_cors_headers; exports.set_cors_headers_s3 = set_cors_headers_s3; exports.set_cors_headers_sts = set_cors_headers_sts; diff --git a/src/util/lifecycle_utils.js b/src/util/lifecycle_utils.js index dc7d22434c..239ea8116e 100644 --- a/src/util/lifecycle_utils.js +++ b/src/util/lifecycle_utils.js @@ -69,7 +69,174 @@ function file_matches_filter({obj_info, filter_func = undefined}) { return true; } +/** + * get_lifecycle_rule_for_object determines the most specific matching lifecycle rule for the given object metadata + * + * @param {Array} rules + * @param {Object} object_info + * @returns {Object|undefined} + */ +function get_lifecycle_rule_for_object(rules, object_info) { + if (!object_info?.key || !Array.isArray(rules) || rules.length < 1) return; + + let matched_rule; + let curr_priority = { + prefix_len: -1, + tag_count: -1, + size_span: Infinity, + }; + + for (const rule of rules) { + if (rule?.status !== 'Enabled') continue; + + const filter_func = build_lifecycle_filter(rule); + if 
(!filter_func(object_info)) continue; + + const new_priority = get_rule_priority(rule.filter); + + if (compare_rule_priority(curr_priority, new_priority)) { + matched_rule = rule; + curr_priority = new_priority; + } + } + return matched_rule; +} + +/** + * build_expiration_header converts an expiration rule (either with `date` or `days`) + * into an s3 style `x-amz-expiration` header value + * + * @param {Object} rule + * @param {number} create_time - object creation time in epoch milliseconds + * @returns {string|undefined} + * + * Example output: + * expiry-date="Thu, 10 Apr 2025 00:00:00 GMT", rule-id="rule_id" + */ +function build_expiration_header(rule, create_time) { + const expiration = rule.expiration; + const rule_id = rule.id; + + if (!expiration || (!expiration.date && !expiration.days)) return undefined; + + const expiration_date = expiration.date ? + new Date(expiration.date) : + new Date(create_time + expiration.days * 24 * 60 * 60 * 1000); + + expiration_date.setUTCHours(0, 0, 0, 0); // adjust expiration to midnight UTC + + return `expiry-date="${expiration_date.toUTCString()}", rule-id="${rule_id}"`; +} + +////////////////// +// FILTERS HELPERS // +////////////////// + +/** + * @typedef {{ + * filter: Object + * expiration: Number + * }} filter_params + * + * builds lifecycle filter function + * + * @param {filter_params} params + * @returns {function(Object): boolean} predicate - true if object_info passes the filter and expiration + */ +function build_lifecycle_filter(params) { + /** + * @param {Object} object_info + */ + return function(object_info) { + if (params.filter?.prefix && !object_info.key.startsWith(params.filter.prefix)) return false; + if (params.expiration && object_info.age < params.expiration) return false; + if (params.filter?.tags && !file_contain_tags(object_info, params.filter.tags)) return false; + if (params.filter?.object_size_greater_than && object_info.size < params.filter.object_size_greater_than) return false; + if (params.filter?.object_size_less_than && object_info.size > params.filter.object_size_less_than) return false; + return true; + }; +} + +/** + 
* get_rule_priority calculates the priority of a lifecycle rule's filter + * + * @param {Object} filter + * @returns {Object} priority object + */ +function get_rule_priority(filter) { + return { + prefix_len: (filter?.prefix || '').length, + tag_count: Array.isArray(filter?.tags) ? filter.tags.length : 0, + size_span: (filter?.object_size_less_than ?? Infinity) - (filter?.object_size_greater_than ?? 0) + }; +} + +/** + * compare_rule_priority determines if a new rule has higher priority + * + * priority is based on: + * - longest matching prefix + * - most matching tags + * - narrowest object size range + * + * @param {Object} curr_priority + * @param {Object} new_priority + * @returns {boolean} + */ +function compare_rule_priority(curr_priority, new_priority) { + // compare prefix length + if (new_priority.prefix_len > curr_priority.prefix_len) return true; + + if (new_priority.prefix_len === curr_priority.prefix_len) { + // compare tag count (if prefixes are equal) + if (new_priority.tag_count > curr_priority.tag_count) return true; + + if (new_priority.tag_count === curr_priority.tag_count) { + // compare size span (if prefixes and tags are equal) + if (new_priority.size_span < curr_priority.size_span) return true; + } + } + + return false; +} + +////////////////// +// TAGS HELPERS // +////////////////// + +/** + * checks if tag query_tag is in the list tag_set + * @param {Object} query_tag + * @param {Array} tag_set + */ +function list_contain_tag(query_tag, tag_set) { + for (const t of tag_set) { + if (t.key === query_tag.key && t.value === query_tag.value) return true; + } + return false; +} + +/** + * checks if object has all the tags in filter_tags + * @param {Object} object_info + * @param {Array} filter_tags + * @returns + */ +function file_contain_tags(object_info, filter_tags) { + const object_tags = object_info.tags || object_info.tagging; + if (!object_tags) return false; + for (const tag of filter_tags) { + if (!list_contain_tag(tag, object_tags)) { 
+ return false; + } + } + return true; +} + exports.get_latest_nc_lifecycle_run_status = get_latest_nc_lifecycle_run_status; exports.file_matches_filter = file_matches_filter; exports.get_lifecycle_object_info_for_filter = get_lifecycle_object_info_for_filter; exports.get_file_age_days = get_file_age_days; +exports.get_lifecycle_rule_for_object = get_lifecycle_rule_for_object; +exports.build_expiration_header = build_expiration_header; +exports.build_lifecycle_filter = build_lifecycle_filter;