From f2f499b7b1d4eb72f5e68e3af2349e6c867f128b Mon Sep 17 00:00:00 2001
From: Ashish Pandey
Date: Tue, 19 Nov 2024 09:10:07 +0530
Subject: [PATCH] Implement ObjectID to replace mongodb

We need to remove the mongodb-related code, as we are now using postgres.
This will be done in steps; the first step is to implement an ObjectID
class and replace mongodb.ObjectId calls with that class.

Signed-off-by: Ashish Pandey
---
 src/agent/block_store_speed.js | 4 +-
 src/cmd/manage_nsfs.js | 6 +-
 src/manage_nsfs/nc_master_key_manager.js | 2 +-
 src/nc/nc_utils.js | 7 +-
 src/sdk/bucketspace_fs.js | 4 +-
 src/sdk/map_api_types.js | 20 +-
 src/sdk/nb.d.ts | 20 +-
 .../analytic_services/activity_log_store.js | 4 +-
 .../analytic_services/history_data_store.js | 5 +-
 src/server/func_services/func_stats_store.js | 5 +-
 src/server/func_services/func_store.js | 6 +-
 src/server/node_services/node_allocator.js | 12 +-
 src/server/node_services/nodes_store.js | 5 +-
 src/server/notifications/alerts_log_store.js | 14 +-
 src/server/object_services/map_builder.js | 4 +-
 src/server/object_services/map_db_types.js | 6 +-
 src/server/object_services/map_reader.js | 4 +-
 src/server/object_services/map_server.js | 16 +-
 src/server/object_services/mapper.js | 12 +-
 src/server/object_services/md_store.js | 19 +-
 src/server/object_services/object_server.js | 6 +-
 .../system_services/config_file_store.js | 5 +-
 .../system_services/master_key_manager.js | 2 +-
 .../system_services/replication_store.js | 5 +-
 src/server/system_services/system_store.js | 2 +-
 .../test_nc_master_keys_exec.test.js | 2 +-
 .../awscli/awscli_iwgdisgt.sreq | 33 +-
 .../unit_tests/test_agent_blocks_reclaimer.js | 24 +-
 .../unit_tests/test_agent_blocks_verifier.js | 29 +-
 src/test/unit_tests/test_lifecycle.js | 5 +-
 src/test/unit_tests/test_map_client.js | 10 +-
 src/test/unit_tests/test_map_reader.js | 7 +-
 src/test/unit_tests/test_mapper.js | 30 +-
 src/test/unit_tests/test_md_store.js | 23 +-
 src/test/unit_tests/test_schema_keywords.js | 4 +-
 src/test/unit_tests/test_tiering_upload.js | 2 +-
 src/tools/mapper_speed.js | 42 +-
 src/tools/md_blow.js | 2 +-
 src/upgrade/migrator.js | 2 +-
 src/util/db_client.js | 7 +-
 src/util/fnv1a.js | 47 +
 src/util/long.js | 852 ++++++++++++++++++
 src/util/mongo_client.js | 4 +-
 src/util/mongo_utils.js | 4 +-
 src/util/objectid.js | 130 +++
 src/util/objectidtemp.js | 152 ++++
 src/util/postgres_client.js | 29 +-
 src/util/schema_keywords.js | 10 +-
 48 files changed, 1417 insertions(+), 228 deletions(-)
 create mode 100644 src/util/fnv1a.js
 create mode 100644 src/util/long.js
 create mode 100644 src/util/objectid.js
 create mode 100644 src/util/objectidtemp.js

diff --git a/src/agent/block_store_speed.js b/src/agent/block_store_speed.js
index b3580732ee..6bdc8528ae 100644
--- a/src/agent/block_store_speed.js
+++ b/src/agent/block_store_speed.js
@@ -4,7 +4,6 @@
 // const _ = require('lodash');
 const argv = require('minimist')(process.argv);
 const cluster = require('cluster');
-const mongodb = require('mongodb');
 const api = require('../api');
 const config = require('../../config');
@@ -12,6 +11,7 @@
 const dotenv = require('../util/dotenv');
 const Speedometer = require('../util/speedometer');
 const { RPC_BUFFERS } = require('../rpc');
+const ObjectID = require('../util/objectid.js');
 dotenv.load();
 argv.email = argv.email || 'demo@noobaa.com';
@@ -60,7 +60,7 @@ async function worker(client) {
 }
 async function write_block(client) {
- const block_id = new 
mongodb.ObjectId(); + const block_id = (new ObjectID(null)).toString(); return client.block_store.write_block({ [RPC_BUFFERS]: { data: Buffer.allocUnsafe(argv.size) }, block_md: { diff --git a/src/cmd/manage_nsfs.js b/src/cmd/manage_nsfs.js index 2f5766679c..10f60a473f 100644 --- a/src/cmd/manage_nsfs.js +++ b/src/cmd/manage_nsfs.js @@ -11,7 +11,7 @@ const nb_native = require('../util/nb_native'); const { ConfigFS } = require('../sdk/config_fs'); const cloud_utils = require('../util/cloud_utils'); const native_fs_utils = require('../util/native_fs_utils'); -const mongo_utils = require('../util/mongo_utils'); +const ObjectID = require('../util/objectid.js'); const SensitiveString = require('../util/sensitive_string'); const { account_id_cache } = require('../sdk/accountspace_fs'); const ManageCLIError = require('../manage_nsfs/manage_nsfs_cli_errors').ManageCLIError; @@ -169,7 +169,7 @@ async function merge_new_and_existing_config_data(user_input_bucket_data) { * @returns { Promise<{ code: ManageCLIResponse.BucketCreated, detail: Object, event_arg: Object }>} */ async function add_bucket(data) { - data._id = mongo_utils.mongoObjectId(); + data._id = (new ObjectID(null)).toString(); const parsed_bucket_data = await config_fs.create_bucket_config_file(data); await set_bucker_owner(parsed_bucket_data); return { code: ManageCLIResponse.BucketCreated, detail: parsed_bucket_data, event_arg: { bucket: data.name }}; @@ -413,7 +413,7 @@ async function fetch_existing_account_data(action, target, decrypt_secret_key) { * @returns { Promise<{ code: typeof ManageCLIResponse.AccountCreated, detail: Object, event_arg: Object }>} */ async function add_account(data) { - data._id = mongo_utils.mongoObjectId(); + data._id = (new ObjectID(null)).toString(); await config_fs.create_account_config_file(data); return { code: ManageCLIResponse.AccountCreated, detail: data, event_arg: { account: data.name } }; } diff --git a/src/manage_nsfs/nc_master_key_manager.js b/src/manage_nsfs/nc_master_key_manager.js index dff0760de6..94ef3b7a56 100644 --- a/src/manage_nsfs/nc_master_key_manager.js +++ b/src/manage_nsfs/nc_master_key_manager.js @@ -109,7 +109,7 @@ class NCMasterKeysManager { */ async _create_master_key() { const master_key = { - id: db_client.new_object_id(), + id: db_client.new_object_id().toString(), cipher_key: crypto.randomBytes(32), cipher_iv: crypto.randomBytes(16), encryption_type: 'aes-256-gcm' diff --git a/src/nc/nc_utils.js b/src/nc/nc_utils.js index 74f6ac40fa..d80d6e2754 100644 --- a/src/nc/nc_utils.js +++ b/src/nc/nc_utils.js @@ -1,17 +1,16 @@ /* Copyright (C) 2024 NooBaa */ 'use strict'; -const mongo_utils = require('../util/mongo_utils'); - +const objectid = require('../util/objectid.js'); /** * generate_id will generate an id that we use to identify entities (such as account, bucket, etc.). 
*/
// TODO:
// - reuse this function in NC NSFS where we used the mongo_utils module
-// - this function implantation should be db_client.new_object_id(),
+// - this function implementation should be db_client.new_object_id().toString(),
// but to align with manage nsfs we won't change it now
function generate_id() {
- return mongo_utils.mongoObjectId();
+ return objectid();
}
/**
diff --git a/src/sdk/bucketspace_fs.js b/src/sdk/bucketspace_fs.js
index e3d3788b2b..ae36c11ff7 100644
--- a/src/sdk/bucketspace_fs.js
+++ b/src/sdk/bucketspace_fs.js
@@ -1,6 +1,7 @@
 /* Copyright (C) 2020 NooBaa */
 'use strict';
+const objectid = require('../util/objectid.js');
 const _ = require('lodash');
 const util = require('util');
 const path = require('path');
@@ -9,7 +10,6 @@
 const config = require('../../config');
 const RpcError = require('../rpc/rpc_error');
 const js_utils = require('../util/js_utils');
 const nb_native = require('../util/nb_native');
-const mongo_utils = require('../util/mongo_utils');
 const KeysSemaphore = require('../util/keys_semaphore');
 const {
 get_umasked_mode,
@@ -314,7 +314,7 @@ class BucketSpaceFS extends BucketSpaceSimpleFS {
 new_bucket_defaults(account, { name, tag, lock_enabled, force_md5_etag }, create_uls, bucket_storage_path) {
 return {
- _id: mongo_utils.mongoObjectId(),
+ _id: objectid(),
 name,
 tag: js_utils.default_value(tag, undefined),
 owner_account: account._id,
diff --git a/src/sdk/map_api_types.js b/src/sdk/map_api_types.js
index 0719d31125..c9978e49c2 100644
--- a/src/sdk/map_api_types.js
+++ b/src/sdk/map_api_types.js
@@ -77,7 +77,7 @@ class ChunkAPI {
 get is_building_frags() { return this.chunk_info.is_building_frags; }
 set is_building_frags(val) { this.chunk_info.is_building_frags = val; }
 get dup_chunk_id() { return parse_optional_id(this.chunk_info.dup_chunk); }
- set dup_chunk_id(val) { this.chunk_info.dup_chunk = val.toHexString(); }
+ set dup_chunk_id(val) { this.chunk_info.dup_chunk = val; }
 get frags() {
 if (!this.__frags) {
@@ -102,7 +102,7 @@ class ChunkAPI {
 set_new_chunk_id() {
 if (this._id) throw new Error(`ChunkAPI.set_new_chunk_id: unexpected call for existing chunk ${this._id}`);
- this.chunk_info._id = db_client.instance().new_object_id().toHexString();
+ this.chunk_info._id = db_client.instance().new_object_id().toString();
 }
 /**
@@ -112,14 +112,14 @@
 */
 add_block_allocation(frag, pools, mirror) {
 const block_md = {
- id: db_client.instance().new_object_id().toHexString(),
+ id: db_client.instance().new_object_id().toString(),
 size: this.frag_size,
 digest_b64: frag.digest_b64,
 digest_type: this.chunk_coder_config.frag_digest_type,
 };
 if (!frag.allocations) frag.allocations = [];
 frag.allocations.push({
- mirror_group: mirror._id.toHexString(),
+ mirror_group: mirror._id,
 block_md,
 mirror,
 pools,
@@ -238,7 +238,7 @@ class FragAPI {
 set allocations(val) { this.frag_info.allocations = val; }
 set_new_frag_id() {
- this.frag_info._id = db_client.instance().new_object_id().toHexString();
+ this.frag_info._id = db_client.instance().new_object_id().toString();
 }
 /**
@@ -344,8 +344,8 @@ class BlockAPI {
 /** @type {nb.Pool} */
 const pool = this.system_store.data.systems[0].pools_by_name[node.pool];
 this.node = node;
- this.block_md.node = node._id.toHexString();
- this.block_md.pool = pool._id.toHexString();
+ this.block_md.node = node._id;
+ this.block_md.pool = pool._id;
 this.block_md.address = node.rpc_address;
 this.block_md.node_type = node.node_type;
 const adminfo = this.block_info.adminfo;
@@ -423,18 +423,18 @@
set_new_part_id() {
 if (this._id) throw new Error(`PartAPI.set_new_part_id: already has id ${this._id}`);
- this._id = db_client.instance().new_object_id();
+ this._id = db_client.instance().new_object_id().toString();
 }
 /**
 * @param {nb.ID} chunk_id
 */
- set_chunk(chunk_id) { this.part_info.chunk_id = chunk_id.toHexString(); }
+ set_chunk(chunk_id) { this.part_info.chunk_id = chunk_id.toString(); }
 /**
 * @param {nb.ID} obj_id
 */
- set_obj_id(obj_id) { this.part_info.obj_id = obj_id.toHexString(); }
+ set_obj_id(obj_id) { this.part_info.obj_id = obj_id.toString(); }
 /** @returns {nb.PartInfo} */
 to_api() {
diff --git a/src/sdk/nb.d.ts b/src/sdk/nb.d.ts
index a98071294a..b89e4ac883 100644
--- a/src/sdk/nb.d.ts
+++ b/src/sdk/nb.d.ts
@@ -1,11 +1,13 @@
 export as namespace nb;
 import * as fs from 'fs';
-import * as mongodb from 'mongodb';
 import { EventEmitter } from 'events';
 import { Readable, Writable } from 'stream';
 import { IncomingMessage, ServerResponse } from 'http';
-import { ObjectPart, Checksum} from '@aws-sdk/client-s3';
+import { ObjectPart, Checksum } from '@aws-sdk/client-s3';
+import * as mongodb from 'mongodb';
+
+import ObjectID = require("../util/objectid.js");
 type Semaphore = import('../util/semaphore');
 type KeysSemaphore = import('../util/keys_semaphore');
@@ -39,6 +41,8 @@
 type ReplicationLogCandidates = Record };
+
+
 interface MapByID { [id: string]: T }
 interface Base {
 toString?(): string;
 }
-type ID = mongodb.ObjectID;
-type DBBuffer = mongodb.Binary | Buffer;
+type ID = ObjectID;
+type DBBuffer = Buffer;
 interface System extends Base {
 _id: ID;
@@ -720,8 +724,8 @@ interface DBClient {
 populate(docs: object[] | object, doc_path: string, collection: DBCollection, fields: object): Promise;
 resolve_object_ids_recursive(idmap: object, item: object): object;
 resolve_object_ids_paths(idmap: object, item: object, paths: string[], allow_missing: boolean): object;
- new_object_id(): mongodb.ObjectId;
- parse_object_id(id_str: string): mongodb.ObjectId;
+ new_object_id(): ObjectID;
+ parse_object_id(id_str: string): ObjectID;
 fix_id_type(doc: object[] | object): object[] | object;
 is_object_id(id: object[] | object): boolean;
 is_err_duplicate_key(err: object): boolean;
@@ -824,7 +828,7 @@ interface BucketSpace {
 read_account_by_access_key({ access_key: string }): Promise;
 read_bucket_sdk_info({ name: string }): Promise;
- check_same_stat(bucket_name: string, bucket_stat: nb.NativeFSStats); // only implemented in bucketspace_fs
+ check_same_stat(bucket_name: string, bucket_stat: nb.NativeFSStats); // only implemented in bucketspace_fs
 list_buckets(params: object, object_sdk: ObjectSDK): Promise;
 read_bucket(params: object): Promise;
@@ -1150,4 +1154,4 @@ interface GetObjectAttributesParts {
 MaxParts?: number;
 IsTruncated?: boolean;
 Parts?: ObjectPart[];
- }
\ No newline at end of file
+}
\ No newline at end of file
diff --git a/src/server/analytic_services/activity_log_store.js b/src/server/analytic_services/activity_log_store.js
index c0411730b9..edf369f264 100644
--- a/src/server/analytic_services/activity_log_store.js
+++ b/src/server/analytic_services/activity_log_store.js
@@ -1,10 +1,10 @@
 /* Copyright (C) 2016 NooBaa */
 'use strict';
-const mongodb = require('mongodb');
 const _ = require('lodash');
 const db_client = require('../../util/db_client');
+const ObjectID = require('../../util/objectid.js');
 const P = require('../../util/promise');
 const activity_log_schema = require('./activity_log_schema');
 const 
activity_log_indexes = require('./activity_log_indexes'); @@ -25,7 +25,7 @@ class ActivityLogStore { } make_activity_log_id(id_str) { - return new mongodb.ObjectID(id_str); + return (new ObjectID(id_str)).toString(); } diff --git a/src/server/analytic_services/history_data_store.js b/src/server/analytic_services/history_data_store.js index 3ab9068cec..6684272ed2 100644 --- a/src/server/analytic_services/history_data_store.js +++ b/src/server/analytic_services/history_data_store.js @@ -1,8 +1,6 @@ /* Copyright (C) 2016 NooBaa */ 'use strict'; -const mongodb = require('mongodb'); - // const dbg = require('../../util/debug_module')(__filename); const config = require('../../../config.js'); // const pkg = require('../../../package.json'); @@ -11,6 +9,7 @@ const P = require('../../util/promise'); const db_client = require('../../util/db_client'); const system_history_schema = require('../analytic_services/system_history_schema'); +const ObjectID = require('../../util/objectid.js'); class HistoryDataStore { constructor() { @@ -30,7 +29,7 @@ class HistoryDataStore { const time_stamp = new Date(); const record_expiration_date = new Date(time_stamp.getTime() - config.STATISTICS_COLLECTOR_EXPIRATION); const record = { - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), time_stamp, system_snapshot: item, history_type: 'SYSTEM' diff --git a/src/server/func_services/func_stats_store.js b/src/server/func_services/func_stats_store.js index 927ad0d972..fbecad53ca 100644 --- a/src/server/func_services/func_stats_store.js +++ b/src/server/func_services/func_stats_store.js @@ -2,8 +2,7 @@ 'use strict'; // const _ = require('lodash'); -const mongodb = require('mongodb'); - +const ObjectID = require('../../util/objectid.js'); // const dbg = require('../../util/debug_module')(__filename); const db_client = require('../../util/db_client'); @@ -26,7 +25,7 @@ class FuncStatsStore { } make_func_stat_id(id_str) { - return new mongodb.ObjectId(id_str); + return (new ObjectID(id_str)).toString(); } async create_func_stat(stat) { diff --git a/src/server/func_services/func_store.js b/src/server/func_services/func_store.js index 1313a8eb92..3c4c580f13 100644 --- a/src/server/func_services/func_store.js +++ b/src/server/func_services/func_store.js @@ -1,13 +1,11 @@ /* Copyright (C) 2016 NooBaa */ 'use strict'; -const mongodb = require('mongodb'); - const db_client = require('../../util/db_client'); const func_schema = require('./func_schema'); const func_indexes = require('./func_indexes'); - +const ObjectID = require('../../util/objectid.js'); class FuncStore { constructor() { @@ -24,7 +22,7 @@ class FuncStore { } make_func_id(id_str) { - return new mongodb.ObjectId(id_str); + return (new ObjectID(id_str)).toString(); } async create_func(func) { diff --git a/src/server/node_services/node_allocator.js b/src/server/node_services/node_allocator.js index 2750e2197f..80132e0cb2 100644 --- a/src/server/node_services/node_allocator.js +++ b/src/server/node_services/node_allocator.js @@ -140,7 +140,7 @@ async function refresh_tiering_alloc(tiering, force) { * @returns {Promise} */ async function refresh_pool_alloc(pool, force) { - const pool_id_str = pool._id.toHexString(); + const pool_id_str = pool._id; let group = alloc_group_by_pool[pool_id_str]; if (!group) { group = { @@ -202,7 +202,7 @@ async function refresh_tiers_alloc(tiering_list, force) { const wait_list = []; for (const tiering of tiering_list) { - const tiering_id_str = tiering._id.toHexString(); + const tiering_id_str = tiering._id; let group = 
alloc_group_by_tiering[tiering_id_str]; if (!group) { group = { @@ -266,10 +266,10 @@ function get_tiering_status(tiering) { /** @type {nb.TieringStatus} */ const tiering_status_by_tier = {}; if (!tiering) return tiering_status_by_tier; - const tiering_id_str = tiering._id.toHexString(); + const tiering_id_str = tiering._id; const alloc_group = alloc_group_by_tiering[tiering_id_str]; _.each(tiering.tiers, ({ tier }) => { - const tier_id_str = tier._id.toHexString(); + const tier_id_str = tier._id; const mirrors_storage = alloc_group && alloc_group.mirrors_storage_by_tier_id[tier_id_str]; let tier_pools = []; // Inside the Tier, pools are unique and we don't need to filter afterwards @@ -310,7 +310,7 @@ function _get_tier_pools_status(pools, required_valid_nodes) { } else if (num_nodes < required_valid_nodes) { valid_for_allocation = false; } - pools_status_by_id[pool._id.toHexString()] = { + pools_status_by_id[pool._id] = { valid_for_allocation, num_nodes, resource_type: pool.resource_type @@ -344,7 +344,7 @@ function allocate_node({ avoid_nodes, allocated_hosts, pools = [] }) { // Since we will merge the two groups we will eventually have two average groups // This is bad since we will have two groups with each having fast and slow drives pools.forEach(pool => { - const group = alloc_group_by_pool[pool._id.toHexString()]; + const group = alloc_group_by_pool[pool._id]; if (group && group.latency_groups) { group.latency_groups.forEach((value, index) => { if (pools_latency_groups[index]) { diff --git a/src/server/node_services/nodes_store.js b/src/server/node_services/nodes_store.js index 3b54631aa1..6c6e7ba8a0 100644 --- a/src/server/node_services/nodes_store.js +++ b/src/server/node_services/nodes_store.js @@ -2,13 +2,12 @@ 'use strict'; const _ = require('lodash'); -const mongodb = require('mongodb'); const dbg = require('../../util/debug_module')(__filename); const node_schema = require('./node_schema'); const db_client = require('../../util/db_client'); const P = require('../../util/promise'); - +const ObjectID = require('../../util/objectid.js'); class NodesStore { constructor(test_suffix = '') { @@ -24,7 +23,7 @@ class NodesStore { } make_node_id(id_str) { - return new mongodb.ObjectId(id_str); + return (new ObjectID(id_str)).toString(); } is_connected() { diff --git a/src/server/notifications/alerts_log_store.js b/src/server/notifications/alerts_log_store.js index 77d7aa915e..261a33ed3c 100644 --- a/src/server/notifications/alerts_log_store.js +++ b/src/server/notifications/alerts_log_store.js @@ -1,13 +1,15 @@ /* Copyright (C) 2016 NooBaa */ 'use strict'; -const mongodb = require('mongodb'); const _ = require('lodash'); const P = require('../../util/promise'); const db_client = require('../../util/db_client'); const alerts_log_schema = require('./alerts_log_schema'); +// @ts-ignore +const ObjectID = require('../../util/objectid.js'); + class AlertsLogStore { constructor() { @@ -22,8 +24,8 @@ class AlertsLogStore { return AlertsLogStore._instance; } - make_alert_log_id(id_str) { - return new mongodb.ObjectID(id_str); + make_alert_log_id() { + return (new ObjectID(null)).toString(); } create(alert_log) { @@ -94,12 +96,12 @@ class AlertsLogStore { let _id; if (ids) { - const obj_ids = ids.map(id => new mongodb.ObjectID(id)); + const obj_ids = ids.map(id => new ObjectID(id)); _id = { $in: obj_ids }; } else if (till) { - _id = { $lt: new mongodb.ObjectID(till) }; + _id = { $lt: new ObjectID(till) }; } else if (since) { - _id = { $gt: new mongodb.ObjectID(since) }; + _id = { $gt: new 
ObjectID(since) }; } return _.omitBy({ diff --git a/src/server/object_services/map_builder.js b/src/server/object_services/map_builder.js index 42cb22d466..2f7b044c2e 100644 --- a/src/server/object_services/map_builder.js +++ b/src/server/object_services/map_builder.js @@ -154,7 +154,7 @@ class MapBuilder { } }); - const chunks_to_delete_uniq = _.uniqBy(chunks_to_delete, chunk => chunk._id.toHexString()); + const chunks_to_delete_uniq = _.uniqBy(chunks_to_delete, chunk => chunk._id); dbg.log1('MapBuilder.update_db:', 'chunks_to_build', chunks_to_build.length, @@ -190,7 +190,7 @@ class MapBuilder { // const all_blocks = get_all_chunks_blocks(chunks); // await P.map(all_blocks, async block => { // const node = await nodes_client.read_node_by_id(system_store.data.systems[0]._id, - // block.node_id.toHexString()); + // block.node_id); // block.set_node(node); // }); diff --git a/src/server/object_services/map_db_types.js b/src/server/object_services/map_db_types.js index 4c03509faa..160515ab04 100644 --- a/src/server/object_services/map_db_types.js +++ b/src/server/object_services/map_db_types.js @@ -20,7 +20,7 @@ const undefined_buffer = undefined; * @returns {string | undefined} */ function optional_id_str(id) { - return id === undefined ? undefined : id.toHexString(); + return id === undefined ? undefined : id; } /** @@ -100,14 +100,14 @@ class ChunkDB { */ add_block_allocation(frag, pools, mirror) { const block_md = { - id: db_client.instance().new_object_id().toHexString(), + id: db_client.instance().new_object_id().toString(), size: this.frag_size, digest_b64: frag.digest_b64, digest_type: this.chunk_coder_config.frag_digest_type, }; if (!frag.allocations) frag.allocations = []; frag.allocations.push({ - mirror_group: mirror._id.toHexString(), + mirror_group: mirror._id, block_md, mirror, pools, diff --git a/src/server/object_services/map_reader.js b/src/server/object_services/map_reader.js index 0f6bf95469..a1ad8f62e9 100644 --- a/src/server/object_services/map_reader.js +++ b/src/server/object_services/map_reader.js @@ -89,7 +89,7 @@ async function read_object_mapping_admin(obj, skip, limit) { async function read_node_mapping(node_ids, skip, limit) { const chunk_ids = await MDStore.instance().find_blocks_chunks_by_node_ids(node_ids, skip, limit); const parts = await MDStore.instance().find_parts_by_chunk_ids(chunk_ids); - const chunks = await read_parts_mapping(_.uniqBy(parts, part => part.chunk.toHexString())); + const chunks = await read_parts_mapping(_.uniqBy(parts, part => part.chunk)); return chunks; } @@ -106,7 +106,7 @@ async function read_parts_mapping(parts, location_info) { await MDStore.instance().load_blocks_for_chunks(chunks_db, sorter); const chunks_db_by_id = _.keyBy(chunks_db, '_id'); const chunks = parts.map(part => { - const chunk = new ChunkDB({ ...chunks_db_by_id[part.chunk.toHexString()], parts: [part] }); + const chunk = new ChunkDB({ ...chunks_db_by_id[part.chunk], parts: [part] }); return chunk; }); await map_server.prepare_chunks({ chunks }); diff --git a/src/server/object_services/map_server.js b/src/server/object_services/map_server.js index d6ce2c8ac5..1427705903 100644 --- a/src/server/object_services/map_server.js +++ b/src/server/object_services/map_server.js @@ -169,8 +169,8 @@ class GetMapping { return false; } const pool = chunk.tier.system.pools_by_name[node.pool]; - alloc.block_md.node = node._id.toHexString(); - alloc.block_md.pool = pool._id.toHexString(); + alloc.block_md.node = node._id; + alloc.block_md.pool = pool._id; alloc.block_md.address = 
node.rpc_address; alloc.block_md.node_type = node.node_type; alloc.locality_level = 0; @@ -505,7 +505,7 @@ async function ensure_room_in_tier(tier, bucket) { */ function enough_room_in_tier(tier, bucket) { const tiering = bucket.tiering; - const tier_id_str = tier._id.toHexString(); + const tier_id_str = tier._id; const tiering_status = node_allocator.get_tiering_status(tiering); const tier_status = tiering_status[tier_id_str]; const tier_in_tiering = _.find(tiering.tiers, t => String(t.tier._id) === tier_id_str); @@ -607,16 +607,16 @@ async function _prepare_chunks_group({ chunks, move_to_tier, location_info }) { */ async function prepare_blocks(blocks) { if (!blocks || !blocks.length) return; - const node_ids = _.uniqBy(blocks.map(block => block.node_id), id => id.toHexString()); + const node_ids = _.uniqBy(blocks.map(block => block.node_id), id => id); const system_id = blocks[0].system._id; const { nodes } = await nodes_client.instance().list_nodes_by_identity( system_id, - node_ids.map(id => ({ id: id.toHexString() })), + node_ids.map(id => ({ id: id })), nodes_client.NODE_FIELDS_FOR_MAP ); const nodes_by_id = _.keyBy(nodes, '_id'); for (const block of blocks) { - const node = nodes_by_id[block.node_id.toHexString()]; + const node = nodes_by_id[block.node_id]; /** @type {nb.Pool} */ const pool = system_store.data.systems[0].pools_by_name[node.pool]; block.set_node(node, pool); @@ -633,9 +633,9 @@ async function prepare_blocks_from_db(blocks) { const chunks = await MDStore.instance().find_chunks_by_ids(chunk_ids); const chunks_by_id = _.keyBy(chunks, '_id'); const db_blocks = blocks.map(block => { - const chunk_db = new ChunkDB(chunks_by_id[block.chunk.toHexString()]); + const chunk_db = new ChunkDB(chunks_by_id[block.chunk]); const frag_db = _.find(chunk_db.frags, frag => - frag._id.toHexString() === block.frag.toHexString()); + frag._id === block.frag); const block_db = new BlockDB(block, frag_db, chunk_db); return block_db; }); diff --git a/src/server/object_services/mapper.js b/src/server/object_services/mapper.js index d5f0fddef6..7231565696 100644 --- a/src/server/object_services/mapper.js +++ b/src/server/object_services/mapper.js @@ -24,7 +24,7 @@ function select_tier_for_write(tiering, tiering_status, start_tier_order) { if (t.disabled) continue; if (start_tier_order >= 0 && t.order < start_tier_order) continue; if (!selected) selected = t; - const tier_status = tiering_status[t.tier._id.toHexString()]; + const tier_status = tiering_status[t.tier._id]; const tier_has_space = t.tier.mirrors.every((mirror, i) => size_utils.json_to_bigint(tier_status.mirrors_storage[i].free) .greater(config.MIN_TIER_FREE_THRESHOLD) @@ -47,7 +47,7 @@ function select_tier_for_write(tiering, tiering_status, start_tier_order) { * @returns {nb.TierMirror} */ function select_mirror_for_write(tier, tiering, tiering_status, location_info) { - const tier_status = tiering_status[tier._id.toHexString()]; + const tier_status = tiering_status[tier._id]; let mirror_index = 0; let selected; let selected_weight; @@ -55,7 +55,7 @@ function select_mirror_for_write(tier, tiering, tiering_status, location_info) { const mirror_status = tier_status.mirrors_storage[mirror_index]; const local_pool = find_local_pool(mirror.spread_pools, location_info); const is_mongo_included = mirror.spread_pools.some(pool => Boolean(pool.mongo_pool_info)); - const is_local_pool_valid = local_pool && tier_status.pools[local_pool._id.toHexString()].valid_for_allocation; + const is_local_pool_valid = local_pool && 
tier_status.pools[local_pool._id].valid_for_allocation;
 const is_regular_pools_valid = size_utils.json_to_bigint(mirror_status.regular_free).greater(config.MIN_TIER_FREE_THRESHOLD);
 const is_redundant_pools_valid = size_utils.json_to_bigint(mirror_status.redundant_free).greater(config.MIN_TIER_FREE_THRESHOLD);
@@ -91,7 +91,7 @@ function select_mirror_for_write(tier, tiering, tiering_status, location_info) {
 * @param {nb.LocationInfo} [location_info]
 */
 function map_chunk(chunk, tier, tiering, tiering_status, location_info) {
- const tier_status = tiering_status[tier._id.toHexString()];
+ const tier_status = tiering_status[tier._id];
 const blocks_in_use = new Set();
 const is_new_chunk = !chunk._id;
@@ -203,7 +203,7 @@ function map_chunk(chunk, tier, tiering, tiering_status, location_info) {
 for (const block of accessible_blocks) {
 // block on pools that do not belong to the current mirror anymore
 // can be accessible but will eventually be deallocated
- const block_pool_in_mirror = mirror.spread_pools.find(pool => pool._id.toHexString() === block.pool_id.toHexString());
+ const block_pool_in_mirror = mirror.spread_pools.find(pool => pool._id === block.pool_id);
 const is_misplaced = !block.node.writable;
 if (!is_misplaced && block_pool_in_mirror) {
 used_blocks.push(block);
@@ -418,7 +418,7 @@ function _pool_has_redundancy(pool) {
 function find_local_pool(pools, location_info) {
 return location_info && pools.find(pool =>
 (location_info.region && location_info.region === pool.region) ||
- (location_info.pool_id === pool._id.toHexString())
+ (location_info.pool_id === pool._id)
 );
 }
diff --git a/src/server/object_services/md_store.js b/src/server/object_services/md_store.js
index 90cd1ad68c..79ddfd72d9 100644
--- a/src/server/object_services/md_store.js
+++ b/src/server/object_services/md_store.js
@@ -6,9 +6,8 @@
 const _ = require('lodash');
 const assert = require('assert');
 const moment = require('moment');
-const mongodb = require('mongodb');
 const mime = require('mime');
-
+const ObjectID = require('../../util/objectid.js');
 const P = require('../../util/promise');
 const dbg = require('../../util/debug_module')(__filename);
 const db_client = require('../../util/db_client');
@@ -80,18 +79,18 @@ class MDStore {
 if (zero_suffix) {
 suffix = '0'.repeat(16);
 } else {
- suffix = String(new mongodb.ObjectId()).slice(8, 24);
+ suffix = (new ObjectID(null)).toString().slice(8, 24);
 }
 const hex_id = padded_hex_time + suffix;
 assert(padded_hex_time.length === 8);
 assert(suffix.length === 16);
 assert(hex_id.length === 24);
 assert(parseInt(padded_hex_time, 16) === Math.floor(time / 1000));
- return new mongodb.ObjectId(hex_id);
+ return new ObjectID(hex_id);
 }
 is_valid_md_id(id_str) {
- return mongodb.ObjectId.isValid(id_str);
+ return ObjectID.isValid(id_str);
 }
 /////////////
@@ -1188,8 +1189,8 @@
 const parts_by_chunk = _.groupBy(parts, 'chunk');
 const objects_by_id = _.keyBy(objects, 
'_id'); for (const chunk of chunks) { - chunk.parts = parts_by_chunk[chunk._id.toHexString()] || []; - chunk.objects = _.uniq(_.compact(_.map(chunk.parts, part => objects_by_id[part.obj.toHexString()]))); + chunk.parts = parts_by_chunk[chunk._id] || []; + chunk.objects = _.uniq(_.compact(_.map(chunk.parts, part => objects_by_id[part.obj]))); } } @@ -1633,9 +1634,9 @@ class MDStore { }); const blocks_by_chunk = _.groupBy(blocks, 'chunk'); for (const chunk of chunks) { - const blocks_by_frag = _.groupBy(blocks_by_chunk[chunk._id.toHexString()], 'frag'); + const blocks_by_frag = _.groupBy(blocks_by_chunk[chunk._id], 'frag'); for (const frag of chunk.frags) { - const frag_blocks = blocks_by_frag[frag._id.toHexString()] || []; + const frag_blocks = blocks_by_frag[frag._id] || []; frag.blocks = sorter ? frag_blocks.sort(sorter) : frag_blocks; } } @@ -1869,7 +1870,7 @@ function sort_list_uploads_with_delimiter(a, b) { * @returns {nb.ID} */ function make_md_id(id_str) { - return new mongodb.ObjectId(id_str); + return (new ObjectID(id_str).toString()); } diff --git a/src/server/object_services/object_server.js b/src/server/object_services/object_server.js index 53432f590d..9520f7d91f 100644 --- a/src/server/object_services/object_server.js +++ b/src/server/object_services/object_server.js @@ -1412,7 +1412,7 @@ function report_endpoint_problems(req) { function get_object_info(md, options = {}) { const bucket = system_store.data.get_by_id(md.bucket); return { - obj_id: md._id.toHexString(), + obj_id: md._id, bucket: bucket.name, key: md.key, size: md.size || 0, @@ -1671,7 +1671,7 @@ function get_etag(entity, updates) { if (sha256_b64) return 'sha256-' + Buffer.from(sha256_b64, 'base64').toString('hex'); const id = updates?._id || entity._id; - if (id) return 'id-' + id.toHexString(); + if (id) return 'id-' + id; return ''; } @@ -2045,7 +2045,7 @@ async function _complete_object_multiparts(obj, multipart_req) { } else { md5.update(get_etag(mp)); } - const mp_parts = parts_by_mp[mp._id.toHexString()] || []; + const mp_parts = parts_by_mp[mp._id] || []; _complete_next_parts(mp_parts, context); used_multiparts.push(mp); for (const part of mp_parts) { diff --git a/src/server/system_services/config_file_store.js b/src/server/system_services/config_file_store.js index 7a9ab7d435..2e7ebb287e 100644 --- a/src/server/system_services/config_file_store.js +++ b/src/server/system_services/config_file_store.js @@ -4,10 +4,9 @@ const _ = require('lodash'); const dbg = require('../../util/debug_module')(__filename); const db_client = require('../../util/db_client'); -const mongodb = require('mongodb'); const config_file_schema = require('./schemas/config_file_schema'); const config_file_indexes = require('./schemas/config_file_indexes'); - +const ObjectID = require('../../util/objectid.js'); class ConfigFileStore { constructor() { @@ -26,7 +25,7 @@ class ConfigFileStore { async insert(item) { dbg.log0(`insert`, item); _.defaults(item, { - _id: new mongodb.ObjectId() + _id: (new ObjectID(null)).toString() }); // There shouldn't be more than one record, this is being on the safe side this._config_files.validate(item); diff --git a/src/server/system_services/master_key_manager.js b/src/server/system_services/master_key_manager.js index 362841b905..f37008bed8 100644 --- a/src/server/system_services/master_key_manager.js +++ b/src/server/system_services/master_key_manager.js @@ -132,7 +132,7 @@ class MasterKeysManager { */ new_master_key(options) { const { description, master_key_id, root_key_id, cipher_type } = options; 
- const _id = db_client.new_object_id();
+ const _id = db_client.new_object_id().toString();
 const master_id = (master_key_id && db_client.parse_object_id(master_key_id)) || undefined;
 const m_key = _.omitBy({
 _id,
diff --git a/src/server/system_services/replication_store.js b/src/server/system_services/replication_store.js
index d42343a4d6..da0f6dfb91 100644
--- a/src/server/system_services/replication_store.js
+++ b/src/server/system_services/replication_store.js
@@ -2,11 +2,10 @@
 'use strict';
 const _ = require('lodash');
-const mongodb = require('mongodb');
 const db_client = require('../../util/db_client');
 const dbg = require('../../util/debug_module')(__filename);
 const replication_schema = require('./schemas/replication_configuration_schema');
-
+const ObjectID = require('../../util/objectid.js');
 class ReplicationStore {
 constructor() {
@@ -25,7 +24,7 @@
 item = _.omitBy(item, _.isNil);
 dbg.log1(`insert_replication`, item);
 const record = {
- _id: new mongodb.ObjectId(),
+ _id: (new ObjectID(null)).toString(),
 ...item
 };
 this._replicationconfigs.validate(record);
diff --git a/src/server/system_services/system_store.js b/src/server/system_services/system_store.js
index 26d710bf4f..9032bb9b09 100644
--- a/src/server/system_services/system_store.js
+++ b/src/server/system_services/system_store.js
@@ -524,7 +524,7 @@ class SystemStore extends EventEmitter {
 }
 new_system_store_id() {
- return db_client.instance().new_object_id();
+ return db_client.instance().new_object_id().toString();
 }
 parse_system_store_id(id_str) {
diff --git a/src/test/unit_tests/jest_tests/test_nc_master_keys_exec.test.js b/src/test/unit_tests/jest_tests/test_nc_master_keys_exec.test.js
index 9dde0c8b4a..70c2c01cd2 100644
--- a/src/test/unit_tests/jest_tests/test_nc_master_keys_exec.test.js
+++ b/src/test/unit_tests/jest_tests/test_nc_master_keys_exec.test.js
@@ -119,7 +119,7 @@ describe('NC master key manager tests - exec store type', () => {
 it('_create_master_keys_exec when master keys exist -', async () => {
 const new_key = {
- id: db_client.new_object_id(),
+ id: db_client.new_object_id().toString(),
 cipher_key: crypto.randomBytes(32).toString('base64'),
 cipher_iv: crypto.randomBytes(16).toString('base64'),
 encryption_type: 'aes-256-gcm'
 }
diff --git a/src/test/unit_tests/signature_test_suite/awscli/awscli_iwgdisgt.sreq 
b/src/test/unit_tests/signature_test_suite/awscli/awscli_iwgdisgt.sreq index 346b2f4e35..027c50d39d 100644 --- a/src/test/unit_tests/signature_test_suite/awscli/awscli_iwgdisgt.sreq +++ b/src/test/unit_tests/signature_test_suite/awscli/awscli_iwgdisgt.sreq @@ -1,20 +1,19 @@ -PUT /files/util/mongo_utils.js HTTP/1.1 -Host: 127.0.0.1 -Accept-Encoding: identity -Content-Length: 5865 -Content-MD5: lUCXsCayypL6JVFjbf9kAg== -Expect: 100-continue -Date: Thu, 08 Dec 2016 13:02:39 GMT -User-Agent: aws-cli/1.11.26 Python/2.7.10 Darwin/16.1.0 botocore/1.4.83 -Content-Type: application/javascript -Authorization: AWS 123:Zy/+Do9VcaCZcfdno7lXzjw6qHM= - +PUT /files/util/mongo_utils.js HTTP/1.1 +Host: 127.0.0.1 +Accept-Encoding: identity +Content-Length: 5865 +Content-MD5: lUCXsCayypL6JVFjbf9kAg== +Expect: 100-continue +Date: Thu, 08 Dec 2016 13:02:39 GMT +User-Agent: aws-cli/1.11.26 Python/2.7.10 Darwin/16.1.0 botocore/1.4.83 +Content-Type: application/javascript +Authorization: AWS 123:Zy/+Do9VcaCZcfdno7lXzjw6qHM= + 'use strict'; const _ = require('lodash'); const util = require('util'); -const mongodb = require('mongodb'); -const mongoose = require('mongoose'); +const ObjectID = require('../../../../util/objectid.js'); const P = require('./promise'); const RpcError = require('../rpc/rpc_error'); @@ -108,7 +107,7 @@ function populate(docs, doc_path, collection, fields) { function resolve_object_ids_recursive(idmap, item) { _.each(item, (val, key) => { - if (val instanceof mongodb.ObjectId) { + if (val instanceof ObjectID) { if (key !== '_id') { const obj = idmap[val]; if (obj) { @@ -144,7 +143,7 @@ function resolve_object_ids_paths(idmap, item, paths, allow_missing) { } function make_object_id(id_str) { - return new mongodb.ObjectId(id_str); + return (new ObjectID(id_str)).toString(); } function fix_id_type(doc) { @@ -157,12 +156,10 @@ function fix_id_type(doc) { } // apparently mongoose defined it's own class of ObjectID -// instead of using the class from mongodb driver, // so we have to check both for now, // until we can get rid of mongoose completely. 
function is_object_id(id) { - return (id instanceof mongodb.ObjectId) || - (id instanceof mongoose.Types.ObjectId); + return (id instanceof ObjectID); } function is_err_duplicate_key(err) { diff --git a/src/test/unit_tests/test_agent_blocks_reclaimer.js b/src/test/unit_tests/test_agent_blocks_reclaimer.js index 02d6c87f33..89aceccf60 100644 --- a/src/test/unit_tests/test_agent_blocks_reclaimer.js +++ b/src/test/unit_tests/test_agent_blocks_reclaimer.js @@ -4,13 +4,11 @@ // setup coretest first to prepare the env const coretest = require('./coretest'); coretest.setup({ pools_to_create: [coretest.POOL_LIST[0]] }); - +const ObjectID = require('../../util/objectid.js'); const _ = require('lodash'); const mocha = require('mocha'); const assert = require('assert'); const crypto = require('crypto'); -const mongodb = require('mongodb'); - const P = require('../../util/promise'); const config = require('../../../config'); const db_client = require('../../util/db_client'); @@ -246,13 +244,13 @@ mocha.describe('mocked agent_blocks_reclaimer', function() { mocha.it('should mark reclaimed on deleted nodes', async function() { const self = this; // eslint-disable-line no-invalid-this const nodes = [{ - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), rpc_address: 'n2n://SlothTown', online: false, deleted: new Date() }]; const blocks = [{ - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), node: nodes[0]._id, deleted: new Date() }]; @@ -271,12 +269,12 @@ mocha.describe('mocked agent_blocks_reclaimer', function() { mocha.it('should not mark reclaimed on offline nodes', async function() { const self = this; // eslint-disable-line no-invalid-this const nodes = [{ - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), rpc_address: 'n2n://SlothTown', online: false, }]; const blocks = [{ - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), node: nodes[0]._id, deleted: new Date(), fail_to_delete: true @@ -296,14 +294,14 @@ mocha.describe('mocked agent_blocks_reclaimer', function() { mocha.it('should mark reclaimed on non existing nodes', async function() { const self = this; // eslint-disable-line no-invalid-this const nodes = [{ - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), rpc_address: 'n2n://SlothTown', online: true, }]; const blocks = [{ - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), // Non existing node on purpose - node: new mongodb.ObjectId(), + node: (new ObjectID(null)).toString(), deleted: new Date() }]; const reclaimer_mock = @@ -321,16 +319,16 @@ mocha.describe('mocked agent_blocks_reclaimer', function() { mocha.it('should not mark reclaimed on failure to delete', async function() { const self = this; // eslint-disable-line no-invalid-this const nodes = [{ - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), rpc_address: 'n2n://SlothTown', online: true, }]; const blocks = [{ - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), node: nodes[0]._id, deleted: new Date() }, { - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), node: nodes[0]._id, deleted: new Date(), fail_to_delete: true diff --git a/src/test/unit_tests/test_agent_blocks_verifier.js b/src/test/unit_tests/test_agent_blocks_verifier.js index 9c3c91ea74..baa7449675 100644 --- a/src/test/unit_tests/test_agent_blocks_verifier.js +++ b/src/test/unit_tests/test_agent_blocks_verifier.js @@ -14,10 +14,9 @@ const P = require('../../util/promise'); const AgentBlocksVerifier 
= require('../../server/bg_services/agent_blocks_verifier').AgentBlocksVerifier; const db_client = require('../../util/db_client'); const schema_utils = require('../../util/schema_utils'); -const mongodb = require('mongodb'); const config = require('../../../config'); const { ChunkDB, BlockDB } = require('../../server/object_services/map_db_types'); - +const ObjectID = require('../../util/objectid.js'); class VerifierMock extends AgentBlocksVerifier { /** * @@ -75,7 +74,7 @@ class VerifierMock extends AgentBlocksVerifier { const pools_name = _.keyBy(this.pools, 'name'); const db_blocks = blocks.map(block => { const chunk_id = _.get(block, 'chunk'); - const chunk = new ChunkDB(chunks_idmap[chunk_id.toHexString()]); + const chunk = new ChunkDB(chunks_idmap[chunk_id]); const frag = chunk.frags[0]; const db_block = new BlockDB(block, frag, chunk); const id = _.get(block, doc_path); @@ -122,15 +121,15 @@ class VerifierMock extends AgentBlocksVerifier { mocha.describe('mocked agent_blocks_verifier', function() { - const tier_id = new mongodb.ObjectId(); - const bucket_id = new mongodb.ObjectId(); - const system_id = new mongodb.ObjectId(); + const tier_id = (new ObjectID(null)).toString(); + const bucket_id = (new ObjectID(null)).toString(); + const system_id = (new ObjectID(null)).toString(); mocha.it('should verify blocks on nodes', function() { const self = this; // eslint-disable-line no-invalid-this const nodes = [make_node('bla2', false)]; const chunk_coder_configs = [{ - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), chunk_coder_config: { frag_digest_type: 'sloth_type' } @@ -154,7 +153,7 @@ mocha.describe('mocked agent_blocks_verifier', function() { const self = this; // eslint-disable-line no-invalid-this const nodes = [make_node('bla1', true)]; const chunk_coder_configs = [{ - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), chunk_coder_config: { frag_digest_type: 'sloth_type' } @@ -179,7 +178,7 @@ mocha.describe('mocked agent_blocks_verifier', function() { const self = this; // eslint-disable-line no-invalid-this // const nodes = [make_node('node1')]; const chunk_coder_configs = [{ - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), system: system_id, chunk_coder_config: { frag_digest_type: 'sloth_type' @@ -187,7 +186,7 @@ mocha.describe('mocked agent_blocks_verifier', function() { }]; const chunks = [make_schema_chunk(chunk_coder_configs[0]._id, [make_schema_frag()])]; const pools = [make_schema_pool('pool1')]; - const blocks = [make_schema_block(chunks[0].frags[0]._id, chunks[0]._id, new mongodb.ObjectId(), pools[0]._id)]; + const blocks = [make_schema_block(chunks[0].frags[0]._id, chunks[0]._id, (new ObjectID(null)).toString(), pools[0]._id)]; const verifier_mock = new VerifierMock(blocks, [], chunks, pools); return P.resolve() @@ -209,7 +208,7 @@ mocha.describe('mocked agent_blocks_verifier', function() { */ function make_schema_block(frag_id, chunk_id, node_id, pool_id) { return { - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), node: node_id, frag: frag_id, chunk: chunk_id, @@ -226,7 +225,7 @@ mocha.describe('mocked agent_blocks_verifier', function() { */ function make_schema_frag() { return { - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), digest: Buffer.from('bla') }; } @@ -237,7 +236,7 @@ mocha.describe('mocked agent_blocks_verifier', function() { */ function make_schema_chunk(cc_id, frags) { return { - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), 
system: system_id, bucket: bucket_id, tier: tier_id, @@ -263,7 +262,7 @@ mocha.describe('mocked agent_blocks_verifier', function() { */ function make_node(node_name, offline) { return { - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), name: node_name, pool: 'pool1', node_type: 'BLOCK_STORE_FS', @@ -293,7 +292,7 @@ mocha.describe('mocked agent_blocks_verifier', function() { */ function make_schema_pool(name) { return { - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), name: name, system: undefined, resource_type: 'HOSTS', diff --git a/src/test/unit_tests/test_lifecycle.js b/src/test/unit_tests/test_lifecycle.js index 80d468d149..06c41462c6 100644 --- a/src/test/unit_tests/test_lifecycle.js +++ b/src/test/unit_tests/test_lifecycle.js @@ -8,9 +8,8 @@ const { NodeHttpHandler } = require("@smithy/node-http-handler"); const util = require('util'); const mocha = require('mocha'); const assert = require('assert'); -const mongodb = require('mongodb'); const { v4: uuid } = require('uuid'); - +const ObjectID = require('../../util/objectid.js'); const P = require('../../util/promise'); const config = require('../../../config'); const MDStore = require('../../server/object_services/md_store').MDStore; @@ -108,7 +107,7 @@ mocha.describe('lifecycle', () => { if (tagging) update.tagging = tagging; console.log('create_mock_object bucket', bucket, 'key', key, 'update', util.inspect(update)); - const id = new mongodb.ObjectId(obj_id); + const id = new ObjectID(obj_id); console.log('create_mock_object id', id, 'obj_id', obj_id); const updateResult = await MDStore.instance().update_object_by_id(id, update); diff --git a/src/test/unit_tests/test_map_client.js b/src/test/unit_tests/test_map_client.js index c91e2ead8b..8ebcf1878a 100644 --- a/src/test/unit_tests/test_map_client.js +++ b/src/test/unit_tests/test_map_client.js @@ -67,7 +67,7 @@ coretest.describe_mapper_test_case({ // data_index: 1, // blocks: [{ // block_md: { - // id: db_client.instance().new_object_id(), + // id: db_client.instance().new_object_id().toString(), // node: nodes[0]._id, // pool: system.pools_by_name[nodes[0].pool]._id, // } @@ -405,7 +405,7 @@ coretest.describe_mapper_test_case({ blocks: [ { block_md: { - id: db_client.instance().new_object_id(), + id: db_client.instance().new_object_id().toString(), pool: Object.values(system.pools_by_name)[0]._id, address: 'fcall://mocked_address', }, @@ -432,12 +432,12 @@ coretest.describe_mapper_test_case({ }; const tier = { - _id: db_client.instance().new_object_id(), + _id: db_client.instance().new_object_id().toString(), name: 'mocked_tier', system: system, data_placement: 'MIRROR', chunk_config: { - _id: db_client.instance().new_object_id(), + _id: db_client.instance().new_object_id().toString(), system: system, chunk_coder_config: chunk_config }, @@ -445,7 +445,7 @@ coretest.describe_mapper_test_case({ { spread_pools: [ { - _id: db_client.instance().new_object_id(), + _id: db_client.instance().new_object_id().toString(), name: 'mocked_pool', } ] diff --git a/src/test/unit_tests/test_map_reader.js b/src/test/unit_tests/test_map_reader.js index 188681fc0a..ae72bd9671 100644 --- a/src/test/unit_tests/test_map_reader.js +++ b/src/test/unit_tests/test_map_reader.js @@ -9,8 +9,7 @@ coretest.setup({ pools_to_create: [coretest.POOL_LIST[0]] }); // const util = require('util'); const mocha = require('mocha'); // const assert = require('assert'); -const mongodb = require('mongodb'); - +const ObjectID = require('../../util/objectid.js'); // const P = 
require('../../util/promise'); // const MDStore = require('../../server/object_services/md_store').MDStore; // const map_writer = require('../../server/object_services/map_writer'); @@ -40,14 +39,14 @@ coretest.describe_mapper_test_case({ // TODO test_map_reader mocha.it('read_object_mapping', function() { - const obj = { size: 100, _id: new mongodb.ObjectId() }; + const obj = { size: 100, _id: (new ObjectID(null)).toString() }; const start = 0; const end = 100; return map_reader.read_object_mapping(obj, start, end); }); mocha.it('read_object_mapping_admin', function() { - const obj = { size: 100, _id: new mongodb.ObjectId() }; + const obj = { size: 100, _id: (new ObjectID(null)).toString() }; const skip = 0; const limit = 100; return map_reader.read_object_mapping_admin(obj, skip, limit); diff --git a/src/test/unit_tests/test_mapper.js b/src/test/unit_tests/test_mapper.js index 9581a41468..736777bb07 100644 --- a/src/test/unit_tests/test_mapper.js +++ b/src/test/unit_tests/test_mapper.js @@ -5,16 +5,14 @@ // setup coretest first to prepare the env const coretest = require('./coretest'); coretest.no_setup(); - +const ObjectID = require('../../util/objectid.js'); const _ = require('lodash'); const util = require('util'); const mocha = require('mocha'); const assert = require('assert'); -const mongodb = require('mongodb'); const config = require('../../../config.js'); const mapper = require('../../server/object_services/mapper'); - coretest.describe_mapper_test_case({ name: 'mapper', }, ({ @@ -32,45 +30,45 @@ coretest.describe_mapper_test_case({ }) => { const frags = _.concat( - _.times(data_frags, data_index => ({ _id: new mongodb.ObjectId(), data_index })), - _.times(parity_frags, parity_index => ({ _id: new mongodb.ObjectId(), parity_index })) + _.times(data_frags, data_index => ({ _id: (new ObjectID(null)).toString(), data_index })), + _.times(parity_frags, parity_index => ({ _id: (new ObjectID(null)).toString(), parity_index })) ); - const first_pools = _.times(num_pools, i => ({ _id: new mongodb.ObjectId(), name: 'first_pool' + i, })); - const second_pools = _.times(num_pools, i => ({ _id: new mongodb.ObjectId(), name: 'second_pool' + i, })); - const external_pools = _.times(num_pools, i => ({ _id: new mongodb.ObjectId(), name: 'external_pool' + i, })); + const first_pools = _.times(num_pools, i => ({ _id: (new ObjectID(null)).toString(), name: 'first_pool' + i, })); + const second_pools = _.times(num_pools, i => ({ _id: (new ObjectID(null)).toString(), name: 'second_pool' + i, })); + const external_pools = _.times(num_pools, i => ({ _id: (new ObjectID(null)).toString(), name: 'external_pool' + i, })); const pool_by_id = _.keyBy(_.concat(first_pools, second_pools, external_pools), '_id'); const first_mirrors = data_placement === 'MIRROR' ? first_pools.map(pool => ({ - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), spread_pools: [pool] })) : [{ - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), spread_pools: first_pools }]; const second_mirrors = data_placement === 'MIRROR' ? 
second_pools.map(pool => ({ - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), spread_pools: [pool] })) : [{ - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), spread_pools: second_pools }]; const first_tier = { - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), name: 'first_tier', data_placement, mirrors: first_mirrors, chunk_config: { chunk_coder_config }, }; const second_tier = { - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), name: 'second_tier', data_placement, mirrors: second_mirrors, chunk_config: { chunk_coder_config }, }; const tiering = { - _id: new mongodb.ObjectId(), + _id: (new ObjectID(null)).toString(), name: 'tiering_policy', tiers: [{ order: 0, @@ -653,7 +651,7 @@ coretest.describe_mapper_test_case({ const pool = params.pool || pools_to_use[pool_i]; const pool_name = pool.name; - const _id = new mongodb.ObjectID(); + const _id = (new ObjectID(null)).toString(); const _id_str = _id.toString(); return { diff --git a/src/test/unit_tests/test_md_store.js b/src/test/unit_tests/test_md_store.js index ebba3f5361..f4bc4923a6 100644 --- a/src/test/unit_tests/test_md_store.js +++ b/src/test/unit_tests/test_md_store.js @@ -9,10 +9,12 @@ const _ = require('lodash'); const mocha = require('mocha'); const assert = require('assert'); const config = require('./../../../config'); +const ObjectID = require('../../util/objectid.js'); // const P = require('../../util/promise'); const MDStore = require('../../server/object_services/md_store').MDStore; + mocha.describe('md_store', function() { const md_store = new MDStore(`_test_md_store_${Date.now().toString(36)}`); @@ -35,10 +37,12 @@ mocha.describe('md_store', function() { await md_store.insert_object(info); obj = await md_store.find_object_by_id(info._id); + assert_equal(obj, info); await md_store.update_object_by_id(info._id, { size: 777 }, { upload_size: 1 }, { num_parts: 88 }); obj = await md_store.find_object_latest(bucket_id, info.key); + assert_equal(obj, _.defaults({ size: 777, num_parts: 88 }, info)); await md_store.update_object_by_id(info._id, { deleted: new Date() }); @@ -258,14 +262,15 @@ mocha.describe('md_store', function() { mocha.it('find_chunks_by_ids()', async function() { const res = await md_store.find_chunks_by_ids(_.map(chunks, '_id')); - if (config.DB_TYPE === 'mongodb') { + // if (config.DB_TYPE === 'mongodb') { res.forEach(chunk => { if (chunk.digest) chunk.digest = chunk.digest.buffer; if (chunk.cipher_key) chunk.cipher_key = chunk.cipher_key.buffer; if (chunk.cipher_iv) chunk.cipher_iv = chunk.cipher_iv.buffer; if (chunk.cipher_auth_tag) chunk.cipher_auth_tag = chunk.cipher_auth_tag.buffer; }); - } + // } + assert_equal_docs_list(res, chunks); }); @@ -321,6 +326,20 @@ function assert_equal(a, b) { } } +function customEqual(actual, expected) { + if (actual && expected && actual.id && expected.id) { + if (Buffer.isBuffer(actual.id) && Buffer.isBuffer(expected.id)) { + return actual.id.equals(expected.id); + } + if (actual.id instanceof ObjectID && expected.id instanceof ObjectID) { + return actual.id.id.equals(expected.id.id); + } + } + + return _.isEqual(actual, expected); +} + + function assert_equal_docs_list(a, b) { const a_sorted = _.sortBy(a, x => x._id); const b_sorted = _.sortBy(b, x => x._id); diff --git a/src/test/unit_tests/test_schema_keywords.js b/src/test/unit_tests/test_schema_keywords.js index 34bd5a864a..920e5b920d 100644 --- a/src/test/unit_tests/test_schema_keywords.js +++ 
b/src/test/unit_tests/test_schema_keywords.js @@ -5,8 +5,8 @@ const mocha = require('mocha'); const { default: Ajv } = require('ajv'); const schema_keywords = require('../../util/schema_keywords'); const SensitiveString = require('../../util/sensitive_string'); -const mongodb = require('mongodb'); const assert = require('assert'); +const ObjectID = require('../../util/objectid.js'); /** * @typedef {import('ajv').KeywordCxt} KeywordCxt @@ -75,7 +75,7 @@ mocha.describe('Test Schema Keywords', function() { mocha.it('Test keyword objectid', async function() { const validator = ajv.getSchema('test_schema_keywords#/methods/params'); - const should_pass = { key3: new mongodb.ObjectId() }; + const should_pass = { key3: (new ObjectID(null)).toString() }; assert.strictEqual(validator(should_pass), true); const should_fail = { key3: 'not_an_objectid' }; assert.strictEqual(validator(should_fail), false); diff --git a/src/test/unit_tests/test_tiering_upload.js b/src/test/unit_tests/test_tiering_upload.js index f36d639d29..9bb793ce4f 100644 --- a/src/test/unit_tests/test_tiering_upload.js +++ b/src/test/unit_tests/test_tiering_upload.js @@ -260,7 +260,7 @@ mocha.describe('tiering upload', function() { await node_allocator.refresh_tiering_alloc(tiering, 'force'); const tiering_status = node_allocator.get_tiering_status(tiering); - const tier_status = tiering_status[tier0._id.toHexString()]; + const tier_status = tiering_status[tier0._id]; const storage = tier_status.mirrors_storage[0]; // const { storage } = await rpc_client.tier.read_tier({ name: TIER0 }); coretest.log('get_current_storage:', util.inspect(storage, { depth: null })); diff --git a/src/tools/mapper_speed.js b/src/tools/mapper_speed.js index 706fc72479..7a45b36319 100644 --- a/src/tools/mapper_speed.js +++ b/src/tools/mapper_speed.js @@ -25,7 +25,7 @@ async function main() { /** @type {nb.System} */ const system = { - _id: db_client.instance().new_object_id(), + _id: db_client.instance().new_object_id().toString(), name: 'system', buckets_by_name: {}, chunk_configs_by_id: {}, @@ -49,14 +49,14 @@ async function main() { /** @type {nb.ChunkConfig} */ const chunk_config = { - _id: db_client.instance().new_object_id(), + _id: db_client.instance().new_object_id().toString(), system, chunk_coder_config, }; /** @type {nb.Pool} */ const pool = { - _id: db_client.instance().new_object_id(), + _id: db_client.instance().new_object_id().toString(), name: 'pool', system, resource_type: 'HOSTS', @@ -65,20 +65,20 @@ async function main() { /** @type {nb.Tier} */ const tier = { - _id: db_client.instance().new_object_id(), + _id: db_client.instance().new_object_id().toString(), name: 'tier', system, chunk_config, data_placement: 'SPREAD', mirrors: [{ - _id: db_client.instance().new_object_id(), + _id: db_client.instance().new_object_id().toString(), spread_pools: [pool] }], }; /** @type {nb.Tiering} */ const tiering = { - _id: db_client.instance().new_object_id(), + _id: db_client.instance().new_object_id().toString(), name: 'tiering', system, tiers: [{ tier, order: 0, disabled: false }], @@ -87,7 +87,7 @@ async function main() { /** @type {nb.Bucket} */ const bucket = { - _id: db_client.instance().new_object_id(), + _id: db_client.instance().new_object_id().toString(), name: 'bucket', system, tiering, @@ -104,10 +104,10 @@ async function main() { /** @type {nb.TieringStatus} */ const tiering_status = { - [tier._id.toHexString()]: { + [tier._id]: { mirrors_storage: tier.mirrors.map(mirror => FULL_STORAGE), pools: { - 
[tier.mirrors[0].spread_pools[0]._id.toHexString()]: {
+                [tier.mirrors[0].spread_pools[0]._id]: {
                     num_nodes: 1000,
                     resource_type: 'HOSTS',
                     valid_for_allocation: true,
@@ -136,12 +136,11 @@ async function main() {
         tieringpolicies: [tiering],
         buckets: [bucket],
         idmap: {
-            [system._id.toHexString()]: system,
-            [bucket._id.toHexString()]: bucket,
-            [tiering._id.toHexString()]: tiering,
-            [tier._id.toHexString()]: tier,
-            [pool._id.toHexString()]: pool,
-            [system._id.toHexString()]: system,
+            [system._id]: system,
+            [bucket._id]: bucket,
+            [tiering._id]: tiering,
+            [tier._id]: tier,
+            [pool._id]: pool,
         },
         time: Date.now(),
         roles: [],
@@ -170,13 +169,13 @@ async function main() {
     for (let i = 0; i < NUM_CHUNKS; ++i) {
         /** @type {nb.ChunkInfo} */
         const chunk_info = {
-            // _id: new_object_id().toHexString(),
-            bucket_id: bucket._id.toHexString(),
+            // _id: new_object_id().toString(),
+            bucket_id: bucket._id,
             chunk_coder_config,
             size: CHUNK_SIZE,
             frag_size: CHUNK_SIZE,
             frags: [{
-                _id: db_client.instance().new_object_id().toHexString(),
+                _id: db_client.instance().new_object_id().toString(),
                 data_index: 0,
                 digest_b64: crypto.createHash(chunk_coder_config.frag_digest_type).digest('base64'),
                 blocks: [],
@@ -185,9 +185,9 @@ async function main() {
             start: 0,
             end: CHUNK_SIZE,
             seq: 0,
-            obj_id: db_client.instance().new_object_id().toHexString(),
-            chunk_id: db_client.instance().new_object_id().toHexString(),
-            multipart_id: db_client.instance().new_object_id().toHexString(),
+            obj_id: db_client.instance().new_object_id().toString(),
+            chunk_id: db_client.instance().new_object_id().toString(),
+            multipart_id: db_client.instance().new_object_id().toString(),
             }]
         };
         const chunk = new ChunkAPI(chunk_info, system_store);
diff --git a/src/tools/md_blow.js b/src/tools/md_blow.js
index 6a39005b3c..2c2a161785 100644
--- a/src/tools/md_blow.js
+++ b/src/tools/md_blow.js
@@ -102,7 +102,7 @@ async function blow_parts(params) {
         allocations: [{
             mirror_group: 'abc',
             block_md: {
-                id: db_client.instance().new_object_id().toHexString(),
+                id: db_client.instance().new_object_id().toString(),
                 node: node._id,
                 pool: pool_db._id,
                 size: argv.chunk_size
diff --git a/src/upgrade/migrator.js b/src/upgrade/migrator.js
index def8589966..6a4995d032 100644
--- a/src/upgrade/migrator.js
+++ b/src/upgrade/migrator.js
@@ -53,7 +53,7 @@ class Migrator {
         this.migrate_status = await this.upgrade_table.findOne({});
         if (!this.migrate_status) {
             this.migrate_status = {
-                _id: this.to_client.new_object_id(),
+                _id: this.to_client.new_object_id().toString(),
                 collection_index: 0,
                 collection_name: this.collection_list[0].name,
                 last_move_size: 0,
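At the call sites above, ids are now plain 24-character hex strings (new_object_id().toString()),
so they can be used directly as object keys without the val.toHexString() step that
mongodb.ObjectId required. A minimal sketch (informal, not part of the patch; it assumes
src/util/objectid.js as added below):

    const ObjectID = require('./src/util/objectid.js');

    const id = (new ObjectID(null)).toString();  // e.g. '672c0f0e9b1d4e0012345678'
    const idmap = { [id]: 'some-doc' };          // string ids index plain objects as-is
    idmap[id] === 'some-doc';                    // true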
diff --git a/src/util/db_client.js b/src/util/db_client.js
index 78bcee9d5e..64c8c8c85f 100644
--- a/src/util/db_client.js
+++ b/src/util/db_client.js
@@ -2,14 +2,13 @@
 /** @typedef {typeof import('../sdk/nb')} nb */
 'use strict';
 
-const mongodb = require('mongodb');
 const { EventEmitter } = require('events');
 
 const dbg = require('./debug_module')(__filename);
 const config = require('../../config');
 const mongo_client = require('./mongo_client');
 const postgres_client = require('./postgres_client');
-
+const ObjectID = require('../util/objectid.js');
 /**
  * A simple noop db client for cases where we run without a DB.
  * @implements {nb.DBClient}
@@ -36,8 +35,8 @@ class NoneDBClient extends EventEmitter {
     async populate(docs, doc_path, collection, fields) { return this.noop(); }
     resolve_object_ids_recursive(idmap, item) { return this.noop(); }
     resolve_object_ids_paths(idmap, item, paths, allow_missing) { return this.noop(); }
-    new_object_id() { return new mongodb.ObjectId(); }
-    parse_object_id(id_str) { return new mongodb.ObjectId(String(id_str || undefined)); }
+    new_object_id() { return (new ObjectID(null)).toString(); }
+    parse_object_id(id_str) { return new ObjectID(String(id_str || undefined)); }
     fix_id_type(doc) { return doc; }
     is_object_id(id) { return false; }
     is_err_duplicate_key(err) { return false; }
diff --git a/src/util/fnv1a.js b/src/util/fnv1a.js
new file mode 100644
index 0000000000..d8c5bbbbb5
--- /dev/null
+++ b/src/util/fnv1a.js
@@ -0,0 +1,47 @@
+'use strict';
+
+const Long = require('./long');
+
+const MASK_8 = 0xff;
+const MASK_24 = 0xffffff;
+const MASK_32 = 0xffffffff;
+
+// See http://www.isthe.com/chongo/tech/comp/fnv/#FNV-param for the definition of these parameters.
+const FNV_PRIME = new Long(16777619, 0);
+const OFFSET_BASIS = new Long(2166136261, 0);
+const FNV_MASK = new Long(MASK_32, 0);
+
+/**
+ * Implementation of the FNV-1a hash for a 32-bit hash value
+ * Algorithm can be found here: http://www.isthe.com/chongo/tech/comp/fnv/#FNV-1a
+ * @ignore
+ */
+function fnv1a32(input, encoding) {
+    encoding = encoding || 'utf8';
+    const octets = Buffer.from(input, encoding);
+
+    let hash = OFFSET_BASIS;
+    for (let i = 0; i < octets.length; i += 1) {
+        hash = hash.xor(new Long(octets[i], 0));
+        hash = hash.multiply(FNV_PRIME);
+        hash = hash.and(FNV_MASK);
+    }
+    return hash.getLowBitsUnsigned();
+}
+
+/**
+ * Implements FNV-1a to generate 32-bit hash, then uses xor-folding
+ * to convert to a 24-bit hash. See here for more info:
+ * http://www.isthe.com/chongo/tech/comp/fnv/#xor-fold
+ * @ignore
+ */
+function fnv1a24(input, encoding) {
+    const _32bit = fnv1a32(input, encoding);
+    const base = _32bit & MASK_24;
+    const top = (_32bit >>> 24) & MASK_8;
+    const final = (base ^ top) & MASK_24;
+
+    return final;
+}
+
+module.exports = { fnv1a24, fnv1a32 };
diff --git a/src/util/long.js b/src/util/long.js
new file mode 100644
index 0000000000..1e2e897351
--- /dev/null
+++ b/src/util/long.js
@@ -0,0 +1,858 @@
+'use strict';
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// Copyright 2009 Google Inc. All Rights Reserved
+
+/**
+ * Defines a Long class for representing a 64-bit two's-complement
+ * integer value, which faithfully simulates the behavior of a Java "Long". This
+ * implementation is derived from LongLib in GWT.
+ *
+ * Constructs a 64-bit two's-complement integer, given its low and high 32-bit
+ * values as *signed* integers. See the from* functions below for more
+ * convenient ways of constructing Longs.
+ *
+ * The internal representation of a Long is the two given signed, 32-bit values.
+ * We use 32-bit pieces because these are the size of integers on which + * Javascript performs bit-operations. For operations like addition and + * multiplication, we split each number into 16-bit pieces, which can easily be + * multiplied within Javascript's floating-point representation without overflow + * or change in sign. + * + * In the algorithms below, we frequently reduce the negative case to the + * positive case by negating the input(s) and then post-processing the result. + * Note that we must ALWAYS check specially whether those values are MIN_VALUE + * (-2^63) because -MIN_VALUE == MIN_VALUE (since 2^63 cannot be represented as + * a positive number, it overflows back into a negative). Not handling this + * case would often result in infinite recursion. + * + * @class + * @param {number} low the low (signed) 32 bits of the Long. + * @param {number} high the high (signed) 32 bits of the Long. + * @return {Long} + */ +function Long(low, high) { + if (!(this instanceof Long)) return new Long(low, high); + + this._bsontype = 'Long'; + /** + * @type {number} + * @ignore + */ + this.low_ = low | 0; // force into 32 signed bits. + + /** + * @type {number} + * @ignore + */ + this.high_ = high | 0; // force into 32 signed bits. +} + +/** + * Return the int value. + * + * @method + * @return {number} the value, assuming it is a 32-bit integer. + */ +Long.prototype.toInt = function() { + return this.low_; +}; + +/** + * Return the Number value. + * + * @method + * @return {number} the closest floating-point representation to this value. + */ +Long.prototype.toNumber = function() { + return this.high_ * Long.TWO_PWR_32_DBL_ + this.getLowBitsUnsigned(); +}; + +/** + * Return the JSON value. + * + * @method + * @return {string} the JSON representation. + */ +Long.prototype.toJSON = function() { + return this.toString(); +}; + +/** + * Return the String value. + * + * @method + * @param {number} [opt_radix] the radix in which the text should be written. + * @return {string} the textual representation of this value. + */ +Long.prototype.toString = function(opt_radix) { + var radix = opt_radix || 10; + if (radix < 2 || 36 < radix) { + throw Error('radix out of range: ' + radix); + } + + if (this.isZero()) { + return '0'; + } + + if (this.isNegative()) { + if (this.equals(Long.MIN_VALUE)) { + // We need to change the Long value before it can be negated, so we remove + // the bottom-most digit in this base and then recurse to do the rest. + var radixLong = Long.fromNumber(radix); + var div = this.div(radixLong); + var rem = div.multiply(radixLong).subtract(this); + return div.toString(radix) + rem.toInt().toString(radix); + } else { + return '-' + this.negate().toString(radix); + } + } + + // Do several (6) digits each time through the loop, so as to + // minimize the calls to the very expensive emulated div. + var radixToPower = Long.fromNumber(Math.pow(radix, 6)); + + rem = this; + var result = ''; + + while (!rem.isZero()) { + var remDiv = rem.div(radixToPower); + var intval = rem.subtract(remDiv.multiply(radixToPower)).toInt(); + var digits = intval.toString(radix); + + rem = remDiv; + if (rem.isZero()) { + return digits + result; + } else { + while (digits.length < 6) { + digits = '0' + digits; + } + result = '' + digits + result; + } + } +}; + +/** + * Return the high 32-bits value. + * + * @method + * @return {number} the high 32-bits as a signed value. + */ +Long.prototype.getHighBits = function() { + return this.high_; +}; + +/** + * Return the low 32-bits value. 
+ * + * @method + * @return {number} the low 32-bits as a signed value. + */ +Long.prototype.getLowBits = function() { + return this.low_; +}; + +/** + * Return the low unsigned 32-bits value. + * + * @method + * @return {number} the low 32-bits as an unsigned value. + */ +Long.prototype.getLowBitsUnsigned = function() { + return this.low_ >= 0 ? this.low_ : Long.TWO_PWR_32_DBL_ + this.low_; +}; + +/** + * Returns the number of bits needed to represent the absolute value of this Long. + * + * @method + * @return {number} Returns the number of bits needed to represent the absolute value of this Long. + */ +Long.prototype.getNumBitsAbs = function() { + if (this.isNegative()) { + if (this.equals(Long.MIN_VALUE)) { + return 64; + } else { + return this.negate().getNumBitsAbs(); + } + } else { + var val = this.high_ !== 0 ? this.high_ : this.low_; + for (var bit = 31; bit > 0; bit--) { + if ((val & (1 << bit)) !== 0) { + break; + } + } + return this.high_ !== 0 ? bit + 33 : bit + 1; + } +}; + +/** + * Return whether this value is zero. + * + * @method + * @return {boolean} whether this value is zero. + */ +Long.prototype.isZero = function() { + return this.high_ === 0 && this.low_ === 0; +}; + +/** + * Return whether this value is negative. + * + * @method + * @return {boolean} whether this value is negative. + */ +Long.prototype.isNegative = function() { + return this.high_ < 0; +}; + +/** + * Return whether this value is odd. + * + * @method + * @return {boolean} whether this value is odd. + */ +Long.prototype.isOdd = function() { + return (this.low_ & 1) === 1; +}; + +/** + * Return whether this Long equals the other + * + * @method + * @param {Long} other Long to compare against. + * @return {boolean} whether this Long equals the other + */ +Long.prototype.equals = function(other) { + return this.high_ === other.high_ && this.low_ === other.low_; +}; + +/** + * Return whether this Long does not equal the other. + * + * @method + * @param {Long} other Long to compare against. + * @return {boolean} whether this Long does not equal the other. + */ +Long.prototype.notEquals = function(other) { + return this.high_ !== other.high_ || this.low_ !== other.low_; +}; + +/** + * Return whether this Long is less than the other. + * + * @method + * @param {Long} other Long to compare against. + * @return {boolean} whether this Long is less than the other. + */ +Long.prototype.lessThan = function(other) { + return this.compare(other) < 0; +}; + +/** + * Return whether this Long is less than or equal to the other. + * + * @method + * @param {Long} other Long to compare against. + * @return {boolean} whether this Long is less than or equal to the other. + */ +Long.prototype.lessThanOrEqual = function(other) { + return this.compare(other) <= 0; +}; + +/** + * Return whether this Long is greater than the other. + * + * @method + * @param {Long} other Long to compare against. + * @return {boolean} whether this Long is greater than the other. + */ +Long.prototype.greaterThan = function(other) { + return this.compare(other) > 0; +}; + +/** + * Return whether this Long is greater than or equal to the other. + * + * @method + * @param {Long} other Long to compare against. + * @return {boolean} whether this Long is greater than or equal to the other. + */ +Long.prototype.greaterThanOrEqual = function(other) { + return this.compare(other) >= 0; +}; + +/** + * Compares this Long with the given one. + * + * @method + * @param {Long} other Long to compare against. 
+ * @return {number} 0 if they are the same, 1 if this is greater, and -1 if the given one is greater.
+ */
+Long.prototype.compare = function(other) {
+    if (this.equals(other)) {
+        return 0;
+    }
+
+    var thisNeg = this.isNegative();
+    var otherNeg = other.isNegative();
+    if (thisNeg && !otherNeg) {
+        return -1;
+    }
+    if (!thisNeg && otherNeg) {
+        return 1;
+    }
+
+    // at this point, the signs are the same, so subtraction will not overflow
+    if (this.subtract(other).isNegative()) {
+        return -1;
+    } else {
+        return 1;
+    }
+};
+
+/**
+ * The negation of this value.
+ *
+ * @method
+ * @return {Long} the negation of this value.
+ */
+Long.prototype.negate = function() {
+    if (this.equals(Long.MIN_VALUE)) {
+        return Long.MIN_VALUE;
+    } else {
+        return this.not().add(Long.ONE);
+    }
+};
+
+/**
+ * Returns the sum of this and the given Long.
+ *
+ * @method
+ * @param {Long} other Long to add to this one.
+ * @return {Long} the sum of this and the given Long.
+ */
+Long.prototype.add = function(other) {
+    // Divide each number into 4 chunks of 16 bits, and then sum the chunks.
+
+    var a48 = this.high_ >>> 16;
+    var a32 = this.high_ & 0xffff;
+    var a16 = this.low_ >>> 16;
+    var a00 = this.low_ & 0xffff;
+
+    var b48 = other.high_ >>> 16;
+    var b32 = other.high_ & 0xffff;
+    var b16 = other.low_ >>> 16;
+    var b00 = other.low_ & 0xffff;
+
+    var c48 = 0,
+        c32 = 0,
+        c16 = 0,
+        c00 = 0;
+    c00 += a00 + b00;
+    c16 += c00 >>> 16;
+    c00 &= 0xffff;
+    c16 += a16 + b16;
+    c32 += c16 >>> 16;
+    c16 &= 0xffff;
+    c32 += a32 + b32;
+    c48 += c32 >>> 16;
+    c32 &= 0xffff;
+    c48 += a48 + b48;
+    c48 &= 0xffff;
+    return Long.fromBits((c16 << 16) | c00, (c48 << 16) | c32);
+};
+
+/**
+ * Returns the difference of this and the given Long.
+ *
+ * @method
+ * @param {Long} other Long to subtract from this.
+ * @return {Long} the difference of this and the given Long.
+ */
+Long.prototype.subtract = function(other) {
+    return this.add(other.negate());
+};
+
+/**
+ * Returns the product of this and the given Long.
+ *
+ * @method
+ * @param {Long} other Long to multiply with this.
+ * @return {Long} the product of this and the other.
+ */
+Long.prototype.multiply = function(other) {
+    if (this.isZero()) {
+        return Long.ZERO;
+    } else if (other.isZero()) {
+        return Long.ZERO;
+    }
+
+    if (this.equals(Long.MIN_VALUE)) {
+        return other.isOdd() ? Long.MIN_VALUE : Long.ZERO;
+    } else if (other.equals(Long.MIN_VALUE)) {
+        return this.isOdd() ? Long.MIN_VALUE : Long.ZERO;
+    }
+
+    if (this.isNegative()) {
+        if (other.isNegative()) {
+            return this.negate().multiply(other.negate());
+        } else {
+            return this.negate()
+                .multiply(other)
+                .negate();
+        }
+    } else if (other.isNegative()) {
+        return this.multiply(other.negate()).negate();
+    }
+
+    // If both Longs are small, use float multiplication
+    if (this.lessThan(Long.TWO_PWR_24_) && other.lessThan(Long.TWO_PWR_24_)) {
+        return Long.fromNumber(this.toNumber() * other.toNumber());
+    }
+
+    // Divide each Long into 4 chunks of 16 bits, and then add up 4x4 products.
+    // We can skip products that would overflow.
+ + var a48 = this.high_ >>> 16; + var a32 = this.high_ & 0xffff; + var a16 = this.low_ >>> 16; + var a00 = this.low_ & 0xffff; + + var b48 = other.high_ >>> 16; + var b32 = other.high_ & 0xffff; + var b16 = other.low_ >>> 16; + var b00 = other.low_ & 0xffff; + + var c48 = 0, + c32 = 0, + c16 = 0, + c00 = 0; + c00 += a00 * b00; + c16 += c00 >>> 16; + c00 &= 0xffff; + c16 += a16 * b00; + c32 += c16 >>> 16; + c16 &= 0xffff; + c16 += a00 * b16; + c32 += c16 >>> 16; + c16 &= 0xffff; + c32 += a32 * b00; + c48 += c32 >>> 16; + c32 &= 0xffff; + c32 += a16 * b16; + c48 += c32 >>> 16; + c32 &= 0xffff; + c32 += a00 * b32; + c48 += c32 >>> 16; + c32 &= 0xffff; + c48 += a48 * b00 + a32 * b16 + a16 * b32 + a00 * b48; + c48 &= 0xffff; + return Long.fromBits((c16 << 16) | c00, (c48 << 16) | c32); +}; + +/** + * Returns this Long divided by the given one. + * + * @method + * @param {Long} other Long by which to divide. + * @return {Long} this Long divided by the given one. + */ +Long.prototype.div = function(other) { + if (other.isZero()) { + throw Error('division by zero'); + } else if (this.isZero()) { + return Long.ZERO; + } + + if (this.equals(Long.MIN_VALUE)) { + if (other.equals(Long.ONE) || other.equals(Long.NEG_ONE)) { + return Long.MIN_VALUE; // recall that -MIN_VALUE == MIN_VALUE + } else if (other.equals(Long.MIN_VALUE)) { + return Long.ONE; + } else { + // At this point, we have |other| >= 2, so |this/other| < |MIN_VALUE|. + var halfThis = this.shiftRight(1); + var approx = halfThis.div(other).shiftLeft(1); + if (approx.equals(Long.ZERO)) { + return other.isNegative() ? Long.ONE : Long.NEG_ONE; + } else { + var rem = this.subtract(other.multiply(approx)); + var result = approx.add(rem.div(other)); + return result; + } + } + } else if (other.equals(Long.MIN_VALUE)) { + return Long.ZERO; + } + + if (this.isNegative()) { + if (other.isNegative()) { + return this.negate().div(other.negate()); + } else { + return this.negate() + .div(other) + .negate(); + } + } else if (other.isNegative()) { + return this.div(other.negate()).negate(); + } + + // Repeat the following until the remainder is less than other: find a + // floating-point that approximates remainder / other *from below*, add this + // into the result, and subtract it from the remainder. It is critical that + // the approximate value is less than or equal to the real value so that the + // remainder never becomes negative. + var res = Long.ZERO; + rem = this; + while (rem.greaterThanOrEqual(other)) { + // Approximate the result of division. This may be a little greater or + // smaller than the actual value. + approx = Math.max(1, Math.floor(rem.toNumber() / other.toNumber())); + + // We will tweak the approximate result by changing it in the 48-th digit or + // the smallest non-fractional digit, whichever is larger. + var log2 = Math.ceil(Math.log(approx) / Math.LN2); + var delta = log2 <= 48 ? 1 : Math.pow(2, log2 - 48); + + // Decrease the approximation until it is smaller than the remainder. Note + // that if it is too large, the product overflows and is negative. + var approxRes = Long.fromNumber(approx); + var approxRem = approxRes.multiply(other); + while (approxRem.isNegative() || approxRem.greaterThan(rem)) { + approx -= delta; + approxRes = Long.fromNumber(approx); + approxRem = approxRes.multiply(other); + } + + // We know the answer can't be zero... and actually, zero would cause + // infinite recursion since we would make no progress. 
+        if (approxRes.isZero()) {
+            approxRes = Long.ONE;
+        }
+
+        res = res.add(approxRes);
+        rem = rem.subtract(approxRem);
+    }
+    return res;
+};
+
+/**
+ * Returns this Long modulo the given one.
+ *
+ * @method
+ * @param {Long} other Long by which to mod.
+ * @return {Long} this Long modulo the given one.
+ */
+Long.prototype.modulo = function(other) {
+    return this.subtract(this.div(other).multiply(other));
+};
+
+/**
+ * The bitwise-NOT of this value.
+ *
+ * @method
+ * @return {Long} the bitwise-NOT of this value.
+ */
+Long.prototype.not = function() {
+    return Long.fromBits(~this.low_, ~this.high_);
+};
+
+/**
+ * Returns the bitwise-AND of this Long and the given one.
+ *
+ * @method
+ * @param {Long} other the Long with which to AND.
+ * @return {Long} the bitwise-AND of this and the other.
+ */
+Long.prototype.and = function(other) {
+    return Long.fromBits(this.low_ & other.low_, this.high_ & other.high_);
+};
+
+/**
+ * Returns the bitwise-OR of this Long and the given one.
+ *
+ * @method
+ * @param {Long} other the Long with which to OR.
+ * @return {Long} the bitwise-OR of this and the other.
+ */
+Long.prototype.or = function(other) {
+    return Long.fromBits(this.low_ | other.low_, this.high_ | other.high_);
+};
+
+/**
+ * Returns the bitwise-XOR of this Long and the given one.
+ *
+ * @method
+ * @param {Long} other the Long with which to XOR.
+ * @return {Long} the bitwise-XOR of this and the other.
+ */
+Long.prototype.xor = function(other) {
+    return Long.fromBits(this.low_ ^ other.low_, this.high_ ^ other.high_);
+};
+
+/**
+ * Returns this Long with bits shifted to the left by the given amount.
+ *
+ * @method
+ * @param {number} numBits the number of bits by which to shift.
+ * @return {Long} this shifted to the left by the given amount.
+ */
+Long.prototype.shiftLeft = function(numBits) {
+    numBits &= 63;
+    if (numBits === 0) {
+        return this;
+    } else {
+        var low = this.low_;
+        if (numBits < 32) {
+            var high = this.high_;
+            return Long.fromBits(low << numBits, (high << numBits) | (low >>> (32 - numBits)));
+        } else {
+            return Long.fromBits(0, low << (numBits - 32));
+        }
+    }
+};
+
+/**
+ * Returns this Long with bits shifted to the right by the given amount, with the new top bits matching the current sign bit.
+ *
+ * @method
+ * @param {number} numBits the number of bits by which to shift.
+ * @return {Long} this shifted to the right by the given amount.
+ */
+Long.prototype.shiftRight = function(numBits) {
+    numBits &= 63;
+    if (numBits === 0) {
+        return this;
+    } else {
+        var high = this.high_;
+        if (numBits < 32) {
+            var low = this.low_;
+            return Long.fromBits((low >>> numBits) | (high << (32 - numBits)), high >> numBits);
+        } else {
+            return Long.fromBits(high >> (numBits - 32), high >= 0 ? 0 : -1);
+        }
+    }
+};
+
+/**
+ * Returns this Long with bits shifted to the right by the given amount, with zeros placed into the new leading bits.
+ *
+ * @method
+ * @param {number} numBits the number of bits by which to shift.
+ * @return {Long} this shifted to the right by the given amount, with zeros placed into the new leading bits.
+ */ +Long.prototype.shiftRightUnsigned = function(numBits) { + numBits &= 63; + if (numBits === 0) { + return this; + } else { + var high = this.high_; + if (numBits < 32) { + var low = this.low_; + return Long.fromBits((low >>> numBits) | (high << (32 - numBits)), high >>> numBits); + } else if (numBits === 32) { + return Long.fromBits(high, 0); + } else { + return Long.fromBits(high >>> (numBits - 32), 0); + } + } +}; + +/** + * Returns a Long representing the given (32-bit) integer value. + * + * @method + * @param {number} value the 32-bit integer in question. + * @return {Long} the corresponding Long value. + */ +Long.fromInt = function(value) { + if (-128 <= value && value < 128) { + var cachedObj = Long.INT_CACHE_[value]; + if (cachedObj) { + return cachedObj; + } + } + + var obj = new Long(value | 0, value < 0 ? -1 : 0); + if (-128 <= value && value < 128) { + Long.INT_CACHE_[value] = obj; + } + return obj; +}; + +/** + * Returns a Long representing the given value, provided that it is a finite number. Otherwise, zero is returned. + * + * @method + * @param {number} value the number in question. + * @return {Long} the corresponding Long value. + */ +Long.fromNumber = function(value) { + if (isNaN(value) || !isFinite(value)) { + return Long.ZERO; + } else if (value <= -Long.TWO_PWR_63_DBL_) { + return Long.MIN_VALUE; + } else if (value + 1 >= Long.TWO_PWR_63_DBL_) { + return Long.MAX_VALUE; + } else if (value < 0) { + return Long.fromNumber(-value).negate(); + } else { + return new Long((value % Long.TWO_PWR_32_DBL_) | 0, (value / Long.TWO_PWR_32_DBL_) | 0); + } +}; + +/** + * Returns a Long representing the 64-bit integer that comes by concatenating the given high and low bits. Each is assumed to use 32 bits. + * + * @method + * @param {number} lowBits the low 32-bits. + * @param {number} highBits the high 32-bits. + * @return {Long} the corresponding Long value. + */ +Long.fromBits = function(lowBits, highBits) { + return new Long(lowBits, highBits); +}; + +/** + * Returns a Long representation of the given string, written using the given radix. + * + * @method + * @param {string} str the textual representation of the Long. + * @param {number} opt_radix the radix in which the text is written. + * @return {Long} the corresponding Long value. + */ +Long.fromString = function(str, opt_radix) { + if (str.length === 0) { + throw Error('number format error: empty string'); + } + + var radix = opt_radix || 10; + if (radix < 2 || 36 < radix) { + throw Error('radix out of range: ' + radix); + } + + if (str.charAt(0) === '-') { + return Long.fromString(str.substring(1), radix).negate(); + } else if (str.indexOf('-') >= 0) { + throw Error('number format error: interior "-" character: ' + str); + } + + // Do several (8) digits each time through the loop, so as to + // minimize the calls to the very expensive emulated div. + var radixToPower = Long.fromNumber(Math.pow(radix, 8)); + + var result = Long.ZERO; + for (var i = 0; i < str.length; i += 8) { + var size = Math.min(8, str.length - i); + var value = parseInt(str.substring(i, i + size), radix); + if (size < 8) { + var power = Long.fromNumber(Math.pow(radix, size)); + result = result.multiply(power).add(Long.fromNumber(value)); + } else { + result = result.multiply(radixToPower); + result = result.add(Long.fromNumber(value)); + } + } + return result; +}; + +// NOTE: Common constant values ZERO, ONE, NEG_ONE, etc. are defined below the +// from* methods on which they depend. 
+
+/**
+ * A cache of the Long representations of small integer values.
+ * @type {Object}
+ * @ignore
+ */
+Long.INT_CACHE_ = {};
+
+// NOTE: the compiler should inline these constant values below and then remove
+// these variables, so there should be no runtime penalty for these.
+
+/**
+ * Number used repeatedly below in calculations. This must appear before the
+ * first call to any from* function below.
+ * @type {number}
+ * @ignore
+ */
+Long.TWO_PWR_16_DBL_ = 1 << 16;
+
+/**
+ * @type {number}
+ * @ignore
+ */
+Long.TWO_PWR_24_DBL_ = 1 << 24;
+
+/**
+ * @type {number}
+ * @ignore
+ */
+Long.TWO_PWR_32_DBL_ = Long.TWO_PWR_16_DBL_ * Long.TWO_PWR_16_DBL_;
+
+/**
+ * @type {number}
+ * @ignore
+ */
+Long.TWO_PWR_31_DBL_ = Long.TWO_PWR_32_DBL_ / 2;
+
+/**
+ * @type {number}
+ * @ignore
+ */
+Long.TWO_PWR_48_DBL_ = Long.TWO_PWR_32_DBL_ * Long.TWO_PWR_16_DBL_;
+
+/**
+ * @type {number}
+ * @ignore
+ */
+Long.TWO_PWR_64_DBL_ = Long.TWO_PWR_32_DBL_ * Long.TWO_PWR_32_DBL_;
+
+/**
+ * @type {number}
+ * @ignore
+ */
+Long.TWO_PWR_63_DBL_ = Long.TWO_PWR_64_DBL_ / 2;
+
+/** @type {Long} */
+Long.ZERO = Long.fromInt(0);
+
+/** @type {Long} */
+Long.ONE = Long.fromInt(1);
+
+/** @type {Long} */
+Long.NEG_ONE = Long.fromInt(-1);
+
+/** @type {Long} */
+Long.MAX_VALUE = Long.fromBits(0xffffffff | 0, 0x7fffffff | 0);
+
+/** @type {Long} */
+Long.MIN_VALUE = Long.fromBits(0, 0x80000000 | 0);
+
+/**
+ * @type {Long}
+ * @ignore
+ */
+Long.TWO_PWR_24_ = Long.fromInt(1 << 24);
+
+/**
+ * Expose.
+ */
+module.exports = Long;
+module.exports.Long = Long;
diff --git a/src/util/mongo_client.js b/src/util/mongo_client.js
index bb5119bf24..cc4d42a87a 100644
--- a/src/util/mongo_client.js
+++ b/src/util/mongo_client.js
@@ -251,7 +251,7 @@ class MongoClient extends EventEmitter {
         _.each(item, (val, key) => {
             if (val instanceof mongodb.ObjectId) {
                 if (key !== '_id') {
-                    const obj = idmap[val.toHexString()];
+                    const obj = idmap[val];
                     if (obj) {
                         item[key] = obj;
                     }
diff --git a/src/util/mongo_utils.js b/src/util/mongo_utils.js
index 440abfb257..f7cac3b38c 100644
--- a/src/util/mongo_utils.js
+++ b/src/util/mongo_utils.js
@@ -101,7 +101,7 @@ const mongodb = require('mongodb');
 // _.each(item, (val, key) => {
 //     if (val instanceof mongodb.ObjectId) {
 //         if (key !== '_id') {
-//             const obj = idmap[val.toHexString()];
+//             const obj = idmap[val];
 //             if (obj) {
 //                 item[key] = obj;
 //             }
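The ObjectID class added below keeps the BSON id layout: 4 bytes of seconds since the
epoch, a 3-byte FNV-1a hash of the hostname (the fnv1a24 module above), 2 bytes of pid,
and a 3-byte counter. A minimal usage sketch (informal, not part of the patch):

    const ObjectID = require('./src/util/objectid.js');

    const id = new ObjectID(null);                  // fresh 12-byte id
    const hex = id.toHexString();                   // 24-char hex, same as id.toString()
    const copy = ObjectID.createFromHexString(hex);
    copy.equals(id);                                // true
    id.getTimestamp();                              // Date from the leading 4 bytes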
diff --git a/src/util/objectid.js b/src/util/objectid.js
new file mode 100644
index 0000000000..923adb67bc
--- /dev/null
+++ b/src/util/objectid.js
@@ -0,0 +1,130 @@
+'use strict';
+
+const { hostname } = require('os');
+const { fnv1a24 } = require('./fnv1a');
+const { Buffer } = require('buffer');
+
+const MACHINE_ID = fnv1a24(hostname());
+const HEX_REGEX = /^[0-9a-fA-F]{24}$/;
+const HAS_BUFFER_SUPPORT = typeof Buffer !== 'undefined' && typeof Buffer.from === 'function';
+
+/**
+ * ObjectID class to create and handle ObjectId instances.
+ */
+class ObjectID {
+    static cacheHexString = false;
+    static index = Math.floor(Math.random() * 0xffffff);
+
+    constructor(id) {
+        if (id instanceof ObjectID) return id;
+        if (!(this instanceof ObjectID)) return new ObjectID(id);
+
+        this._bsontype = 'ObjectID';
+
+        if (id == null || typeof id === 'number') {
+            this.id = this.generate(id);
+            if (ObjectID.cacheHexString) this.__id = this.toHexString();
+            return;
+        }
+
+        if (!ObjectID.isValid(id)) {
+            throw new TypeError(
+                'Argument passed in must be a 12-byte string, a 24-byte hex string, or a valid ObjectID instance'
+            );
+        }
+
+        if (typeof id === 'string') {
+            if (id.length === 24) {
+                this.id = Buffer.from(id, 'hex');
+            } else if (id.length === 12) {
+                this.id = Buffer.from(id, 'utf8');
+            }
+        } else if (id instanceof Buffer && id.length === 12) {
+            this.id = id;
+        }
+
+        if (ObjectID.cacheHexString) this.__id = this.toHexString();
+    }
+
+    toHexString() {
+        if (ObjectID.cacheHexString && this.__id) return this.__id;
+
+        if (!this.id || !(this.id instanceof Buffer)) {
+            throw new TypeError(
+                `Invalid ObjectId, expected a Buffer but received: ${JSON.stringify(this.id)}`
+            );
+        }
+
+        const hexString = this.id.toString('hex');
+        if (ObjectID.cacheHexString) this.__id = hexString;
+        return hexString;
+    }
+
+    toString() {
+        return this.toHexString();
+    }
+
+    toJSON() {
+        return this.toHexString();
+    }
+
+    equals(otherId) {
+        if (otherId instanceof ObjectID) {
+            return this.toString() === otherId.toString();
+        }
+
+        if (typeof otherId === 'string' && ObjectID.isValid(otherId)) {
+            return otherId.length === 24
+                ? otherId.toLowerCase() === this.toHexString()
+                : otherId === this.id.toString('utf8');
+        }
+
+        return false;
+    }
+
+    getTimestamp() {
+        const time =
+            (this.id[3] | (this.id[2] << 8) | (this.id[1] << 16) | (this.id[0] << 24)) >>> 0;
+        return new Date(time * 1000);
+    }
+
+    generate(time) {
+        const buffer = Buffer.alloc(12);
+        time = typeof time === 'number' ? time : Math.floor(Date.now() / 1000);
+
+        const pid = (process.pid || Math.floor(Math.random() * 0xffff)) & 0xffff;
+        const inc = ObjectID.index = (ObjectID.index + 1) % 0xffffff;
+
+        buffer.writeUInt32BE(time, 0);
+        buffer.writeUIntBE(MACHINE_ID, 4, 3);
+        buffer.writeUInt16BE(pid, 7);
+        buffer.writeUIntBE(inc, 9, 3);
+
+        return buffer;
+    }
+
+    static createFromHexString(hexString) {
+        if (!HEX_REGEX.test(hexString)) {
+            throw new TypeError(
+                'Argument passed in must be a 24-byte hex string'
+            );
+        }
+        return new ObjectID(Buffer.from(hexString, 'hex'));
+    }
+
+    static isValid(id) {
+        if (id == null) return false;
+
+        if (typeof id === 'string') {
+            return id.length === 12 || (id.length === 24 && HEX_REGEX.test(id));
+        }
+
+        if (id instanceof ObjectID || (id instanceof Buffer && id.length === 12)) {
+            return true;
+        }
+
+        return id?.toHexString && ObjectID.isValid(id.id);
+    }
+}
+
+module.exports = ObjectID;
diff --git a/src/util/objectidtemp.js b/src/util/objectidtemp.js
new file mode 100644
index 0000000000..16b458373d
--- /dev/null
+++ b/src/util/objectidtemp.js
@@ -0,0 +1,152 @@
+'use strict';
+
+//https://github.com/mongodb/js-bson/blob/f8920c68aaa986595db04b9915301bf0e38139a2/lib/objectid.js#L174
+
+const { hostname } = require('os');
+const { fnv1a24 } = require('./fnv1a');
+const { Buffer } = require('buffer'); // Ensure proper usage of Buffer
+
+const MACHINE_ID = fnv1a24(hostname());
+const HEX_REGEX = /^[0-9a-fA-F]{24}$/;
+const HAS_BUFFER_SUPPORT = typeof Buffer !== 'undefined' && typeof Buffer.from === 'function';
+const HEX_TABLE = Array.from({ length: 256 }, (_, i) => (i <= 15 ?
'0' : '') + i.toString(16)); + +/** + * ObjectID class to create and handle ObjectId instances. + */ +class ObjectID { + static cacheHexString = true; + static index = Math.floor(Math.random() * 0xffffff); + + constructor(id) { + if (id instanceof ObjectID) return id; + if (!(this instanceof ObjectID)) return new ObjectID(id); + + this._bsontype = 'ObjectID'; + + if (id == null || typeof id === 'number') { + this.id = this.generate(id); + if (ObjectID.cacheHexString) this.__id = this.toString('hex'); + return; + } + + if (!ObjectID.isValid(id)) { + throw new TypeError( + 'Argument passed in must be a 12-byte string, a 24-byte hex string, or a valid ObjectID instance' + ); + } + + if (typeof id === 'string') { + if (id.length === 24) { + this.id = HAS_BUFFER_SUPPORT ? Buffer.from(id, 'hex') : ObjectID.createFromHexString(id).id; + } else if (id.length === 12) { + this.id = id; + } + } else if (id instanceof Buffer && id.length === 12) { + this.id = id; + } else if (id?.toHexString) { + return id; + } + + if (ObjectID.cacheHexString) this.__id = this.toString('hex'); + } + + toHexString() { + if (ObjectID.cacheHexString && this.__id) return this.__id; + + if (!this.id || !(this.id instanceof Buffer || typeof this.id === 'string')) { + throw new TypeError( + `Invalid ObjectId, expected a string or Buffer but received: ${JSON.stringify(this.id)}` + ); + } + + if (this.id instanceof Buffer) { + const hexString = this.id.toString('hex'); + if (ObjectID.cacheHexString) this.__id = hexString; + return hexString; + } + + return Array.from(this.id).map(char => HEX_TABLE[char.charCodeAt(0)]).join(''); + } + + toString(format = 'hex') { + if (this.id instanceof Buffer) { + if (typeof format === 'string' && Buffer.isEncoding(format)) { + return this.id.toString(format); + } else { + throw new TypeError('Invalid encoding format provided for Buffer.toString'); + } + } + return this.toHexString(); + } + + toJSON() { + return this.toHexString(); + } + + equals(otherId) { + if (otherId instanceof ObjectID) { + return this.toString() === otherId.toString(); + } + + if (typeof otherId === 'string' && ObjectID.isValid(otherId)) { + return otherId.length === 24 + ? otherId.toLowerCase() === this.toHexString() + : otherId === this.id; + } + + if (otherId?.toHexString) { + return otherId.toHexString() === this.toHexString(); + } + + return false; + } + + getTimestamp() { + const time = + (this.id[3] | (this.id[2] << 8) | (this.id[1] << 16) | (this.id[0] << 24)) >>> 0; + return new Date(time * 1000); + } + + generate(time) { + const buffer = Buffer.alloc(12); + time = typeof time === 'number' ? 
time : Math.floor(Date.now() / 1000); + + const pid = (process.pid || Math.floor(Math.random() * 0xffff)) & 0xffff; + const inc = ObjectID.index = (ObjectID.index + 1) % 0xffffff; + + buffer.writeUInt32BE(time, 0); + buffer.writeUIntBE(MACHINE_ID, 4, 3); + buffer.writeUInt16BE(pid, 7); + buffer.writeUIntBE(inc, 9, 3); + + return buffer; + } + + static createFromHexString(hexString) { + if (!HEX_REGEX.test(hexString)) { + throw new TypeError( + 'Argument passed in must be a 24-byte hex string' + ); + } + return new ObjectID(Buffer.from(hexString, 'hex')); + } + + + static isValid(id) { + if (id == null) return false; + + if (typeof id === 'string') { + return id.length === 12 || (id.length === 24 && HEX_REGEX.test(id)); + } + + if (id instanceof ObjectID || (id instanceof Buffer && id.length === 12)) { + return true; + } + + return id?.toHexString && ObjectID.isValid(id.id); + } +} + +// Exports +module.exports = ObjectID; diff --git a/src/util/postgres_client.js b/src/util/postgres_client.js index da80b2da1b..7b4a413062 100644 --- a/src/util/postgres_client.js +++ b/src/util/postgres_client.js @@ -30,6 +30,9 @@ const config = require('../../config'); const ssl_utils = require('./ssl_utils'); const DB_CONNECT_ERROR_MESSAGE = 'Could not acquire client from DB connection pool'; + +const ObjectID = require('../util/objectid.js'); + mongodb.Binary.prototype[util.inspect.custom] = function custom_inspect_binary() { return ``; }; @@ -54,7 +57,7 @@ function decode_json(schema, val) { return val; } if (schema.objectid === true) { - return new mongodb.ObjectId(val); + return new ObjectID(val); } if (schema.date === true) { return new Date(val); @@ -97,7 +100,7 @@ function encode_json(schema, val) { const ops = handle_ops_encoding(schema, val); if (ops) return ops; - if (schema.objectid === true && val instanceof mongodb.ObjectID) { + if (schema.objectid === true && val instanceof ObjectID) { return val.toString(); } @@ -1162,7 +1165,7 @@ class PostgresTable { const new_row = {}; for (const key of Object.keys(row)) { if (key === '_id') { - new_row._id = new mongodb.ObjectID(row[key]); + new_row._id = new ObjectID(row[key]); } else { new_row[key] = parseInt(row[key], 10); } @@ -1585,7 +1588,7 @@ class PostgresClient extends EventEmitter { } generate_id() { - return new mongodb.ObjectId(); + return (new ObjectID(null)).toString(); } collection(name) { @@ -1688,8 +1691,8 @@ class PostgresClient extends EventEmitter { } /** - * make a list of ObjectId unique by indexing their string value - * this is needed since ObjectId is an object so === comparison is not + * make a list of ObjectID unique by indexing their string value + * this is needed since ObjectID is an object so === comparison is not * logically correct for it even for two objects with the same id. 
 */
    uniq_ids(docs, doc_path) {
@@ -1732,9 +1735,9 @@ class PostgresClient extends EventEmitter {
     resolve_object_ids_recursive(idmap, item) {
         _.each(item, (val, key) => {
-            if (val instanceof mongodb.ObjectId) {
+            if (val instanceof ObjectID) {
                 if (key !== '_id') {
-                    const obj = idmap[val.toHexString()];
+                    const obj = idmap[val];
                     if (obj) {
                         item[key] = obj;
                     }
@@ -1770,8 +1773,8 @@ class PostgresClient extends EventEmitter {
     /**
      * @returns {nb.ID}
      */
     new_object_id() {
-        return new mongodb.ObjectId();
+        return (new ObjectID(null)).toString();
     }
 
     /**
@@ -1779,20 +1782,20 @@ class PostgresClient extends EventEmitter {
      * @param {string} id_str
      * @returns {nb.ID}
      */
     parse_object_id(id_str) {
-        return new mongodb.ObjectId(String(id_str || undefined));
+        return (new ObjectID(String(id_str || undefined))).toString();
     }
 
     fix_id_type(doc) {
         if (_.isArray(doc)) {
             _.each(doc, d => this.fix_id_type(d));
         } else if (doc && doc._id) {
-            doc._id = new mongodb.ObjectId(doc._id);
+            doc._id = new ObjectID(doc._id);
         }
         return doc;
     }
 
     is_object_id(id) {
-        return (id instanceof mongodb.ObjectId);
+        return (id instanceof ObjectID);
     }
 
     // TODO: Figure out error codes
diff --git a/src/util/schema_keywords.js b/src/util/schema_keywords.js
index 1deb1de3c0..e828564fd9 100644
--- a/src/util/schema_keywords.js
+++ b/src/util/schema_keywords.js
@@ -65,17 +65,17 @@ const KEYWORDS = js_utils.deep_freeze({
             keyword: 'objectid',
             // schemaType: 'boolean',
             /**
-             *
-             * @param {KeywordCxt} cxt
-             *
+             *
+             * @param {KeywordCxt} cxt
+             *
              */
             code(cxt) {
                 const d = cxt.it.data;
                 cxt.gen
                     .if(CG `
                         typeof ${d} === 'object' &&
-                        ${d} &&
-                        ${d}.constructor &&
+                        ${d} &&
+                        ${d}.constructor &&
                         ${d}.constructor.name === 'ObjectID'
                     `)
                     .elseIf(CG `
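With the postgres_client changes above, a schema field marked objectid is persisted as its
24-character hex string and revived on read. An informal round-trip sketch of the
decode_json/encode_json pair (treated here as directly callable, which is an assumption;
they are module-internal):

    const ObjectID = require('./src/util/objectid.js');

    const schema = { objectid: true };
    const stored = encode_json(schema, new ObjectID(null)); // ObjectID -> hex string for the row
    const revived = decode_json(schema, stored);            // hex string -> new ObjectID(stored)
    String(revived) === stored;                             // true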