1 change: 1 addition & 0 deletions src/sdk/nb.d.ts
@@ -770,6 +770,7 @@ interface DBCollection {
 
     executeSQL<T>(query: string, params: Array<any>, options?: { query_name?: string, preferred_pool?: string }): Promise<sqlResult<T>>;
     name: any;
+    schema: any;
 }
 
 type DBDoc = any;
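The new schema property is what lets SQL callers decode raw JSONB rows. A minimal sketch of the intended consumption, assuming only the executeSQL and decode_json signatures visible in this diff (the load_rows helper and its wiring are illustrative, not part of the PR):

// Illustrative helper, not in the PR: decode raw executeSQL rows using the
// collection's schema, mirroring the call added in md_store.js below.
const { decode_json } = require('../../util/postgres_client.js');

async function load_rows(collection, query, params) {
    const res = await collection.executeSQL(query, params);
    // `schema` (the property added above) drives decode_json's revival of
    // typed fields out of the JSONB `data` column.
    return res?.rows?.map(row => decode_json(collection.schema, row.data)) ?? [];
}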
8 changes: 6 additions & 2 deletions src/server/object_services/map_server.js
@@ -85,8 +85,12 @@ class GetMapping {
         if (!config.DEDUP_ENABLED) return;
         await Promise.all(Object.values(this.chunks_per_bucket).map(async chunks => {
             const bucket = chunks[0].bucket;
-            const dedup_keys = _.compact(_.map(chunks,
-                chunk => chunk.digest_b64 && Buffer.from(chunk.digest_b64, 'base64')));
+            const dedup_keys = [];
+            chunks.forEach(chunk => {
+                if (chunk?.digest_b64) {
+                    dedup_keys.push(chunk.digest_b64);
+                }
+            });
             if (!dedup_keys.length) return;
             dbg.log0('GetMapping.find_dups: found keys', dedup_keys.length);
             const dup_chunks_db = await MDStore.instance().find_chunks_by_dedup_key(bucket, dedup_keys);
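The rewritten loop above also changes what a dedup key is: previously a Buffer decoded from base64 (matching MongoDB's binary dedup_key, per the TODO removed in md_store.js below), now the base64 string itself, which is what the new SQL text comparison expects. A standalone Node sketch of the difference, under that interpretation:

// Standalone illustration, not part of the PR.
const chunks = [
    { digest_b64: Buffer.from('noobaa').toString('base64') }, // 'bm9vYmFh'
    { digest_b64: undefined }, // skipped by both versions
];

// Before: keys became Buffers, matching MongoDB's binary representation.
const before = chunks
    .map(chunk => chunk.digest_b64 && Buffer.from(chunk.digest_b64, 'base64'))
    .filter(Boolean);

// After: keys stay base64 strings, matching the SQL text comparison
// (data ->> 'dedup_key' = ANY($3)) introduced in md_store.js.
const after = [];
chunks.forEach(chunk => {
    if (chunk?.digest_b64) after.push(chunk.digest_b64);
});

console.log(before); // [ <Buffer 6e 6f 6f 62 61 61> ]
console.log(after); // [ 'bm9vYmFh' ]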
40 changes: 23 additions & 17 deletions src/server/object_services/md_store.js
@@ -13,6 +13,7 @@ const mime = require('mime-types');
 const P = require('../../util/promise');
 const dbg = require('../../util/debug_module')(__filename);
 const db_client = require('../../util/db_client');
+const { decode_json } = require('../../util/postgres_client.js');
 
 const mongo_functions = require('../../util/mongo_functions');
 const object_md_schema = require('./schemas/object_md_schema');
@@ -1541,23 +1542,28 @@ class MDStore {
      * @returns {Promise<nb.ChunkSchemaDB[]>}
      */
     async find_chunks_by_dedup_key(bucket, dedup_keys) {
-        // TODO: This is temporary patch because of binary representation in MongoDB and PostgreSQL
-        /** @type {nb.ChunkSchemaDB[]} */
-        const chunks = await this._chunks.find({
-            system: bucket.system._id,
-            bucket: bucket._id,
-            dedup_key: {
-                $in: dedup_keys,
-                $exists: true
-            },
-            deleted: null,
-        }, {
-            sort: {
-                _id: -1 // get newer chunks first
-            }
-        });
-        await this.load_blocks_for_chunks(chunks);
-        return chunks;
+        const values = [];
+        let query = `SELECT * FROM ${this._chunks.name} WHERE (data ->> 'system' = $1 AND data ->> 'bucket' = $2`;
+        values.push(`${bucket.system._id}`, `${bucket._id}`);
+
+        if (dedup_keys.length) {
+            query += ` AND (data ->> 'dedup_key' = ANY($3) AND data ? 'dedup_key')`;
+            values.push(dedup_keys);
+        } else {
+            query += ` AND (FALSE AND data ? 'dedup_key')`;
+        }
+
+        query += ` AND (data->'deleted' IS NULL OR data->'deleted' = 'null'::jsonb)) ORDER BY _id DESC;`;
+
+        try {
+            const res = await this._chunks.executeSQL(query, values);
+            const chunks = res?.rows.map(row => decode_json(this._chunks.schema, row.data));
+            await this.load_blocks_for_chunks(chunks);
+            return chunks;
+        } catch (err) {
+            dbg.error('Error while finding chunks by dedup_key. error is ', err);
+            return [];
+        }
Comment on lines +1558 to +1566
⚠️ Potential issue | 🟡 Minor

Improve optional chaining for safety.

Line 1560 uses incomplete optional chaining that does not guarantee chunks is an array:

const chunks = res?.rows.map(row => decode_json(this._chunks.schema, row.data));

If res is undefined, the whole chain short-circuits and chunks ends up undefined rather than an array; if res exists but res.rows is undefined, calling .map() on undefined throws. Either way the method misbehaves. Fix with:

-const chunks = res?.rows.map(row => decode_json(this._chunks.schema, row.data));
+const chunks = res?.rows?.map(row => decode_json(this._chunks.schema, row.data)) || [];

This ensures chunks is always an array, even if res or res.rows is undefined.

🤖 Prompt for AI Agents
In src/server/object_services/md_store.js around lines 1558 to 1566, the mapping uses incomplete optional chaining, so it can throw when res.rows is undefined and yields undefined instead of an array when res is; replace the mapping with a safe expression that always produces an array (e.g. const chunks = (res?.rows?.map(row => decode_json(this._chunks.schema, row.data))) ?? [];), then call await this.load_blocks_for_chunks(chunks); and return chunks so chunks is guaranteed to be an array even when the query returns no result.
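For illustration, a standalone Node sketch of the two failure modes the suggested fix guards against (not part of the review or the PR):

const a = undefined;
console.log(a?.rows.map(r => r)); // undefined: the whole chain short-circuits
const b = {};
// b?.rows.map(r => r); // TypeError: b.rows is undefined, so .map throws
// The suggested form is safe in both cases and always yields an array:
console.log(a?.rows?.map(r => r) ?? []); // []
console.log(b?.rows?.map(r => r) ?? []); // []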

     }
 
     iterate_all_chunks_in_buckets(lower_marker, upper_marker, buckets, limit) {
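To make the new query shape concrete, here is a standalone sketch of the statement the rewritten method assembles for one bucket and a single key. The table name 'datachunks' is a guess for illustration; in the JSONB predicates, ->> extracts a field as text, ? tests key existence, and = ANY($3) compares against the bound text array:

// Illustrative only, not executed by the PR code.
const query =
    "SELECT * FROM datachunks WHERE (data ->> 'system' = $1" +
    " AND data ->> 'bucket' = $2" +
    " AND (data ->> 'dedup_key' = ANY($3) AND data ? 'dedup_key')" +
    " AND (data->'deleted' IS NULL OR data->'deleted' = 'null'::jsonb))" +
    " ORDER BY _id DESC;";
const values = ['<system_id>', '<bucket_id>', ['bm9vYmFh']];
// With an empty dedup_keys array the method appends `FALSE AND data ? 'dedup_key'`
// instead, binds only $1 and $2, and the WHERE clause matches no rows.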
37 changes: 37 additions & 0 deletions src/test/integration_tests/db/test_md_store.js
@@ -380,6 +380,43 @@ mocha.describe('md_store', function() {
         return md_store.delete_chunks_by_ids(_.map(chunks, '_id'));
     });
 
+    mocha.it('find_chunks_by_dedup_key()', async () => {
+        if (config.DB_TYPE !== 'postgres') return; // feature uses SQL path
+        const bucket = { _id: md_store.make_md_id(), system: { _id: system_id } };
+        const chunk = {
+            _id: md_store.make_md_id(),
+            system: system_id,
+            bucket: bucket._id,
+            frags: [{ _id: md_store.make_md_id() }],
+            size: 10,
+            frag_size: 10,
+            dedup_key: Buffer.from('noobaa')
+        };
+        await md_store.insert_chunks([chunk]);
+        const chunksArr = await md_store.find_chunks_by_dedup_key(bucket, [Buffer.from('noobaa').toString('base64')]);
+        assert(Array.isArray(chunksArr));
+        assert(chunksArr.length >= 1);
+        assert(chunksArr[0].frags[0]?._id?.toString() === chunk.frags[0]._id.toString());
+    });
+
+    mocha.it('find_chunks_by_dedup_key empty dedup_key array passed', async () => {
+        if (config.DB_TYPE !== 'postgres') return; // feature uses SQL path
+        const bucket = { _id: md_store.make_md_id(), system: { _id: system_id } };
+        const chunk = {
+            _id: md_store.make_md_id(),
+            system: system_id,
+            bucket: bucket._id,
+            frags: [{ _id: md_store.make_md_id() }],
+            size: 10,
+            frag_size: 10,
+            dedup_key: Buffer.from('noobaa')
+        };
+        await md_store.insert_chunks([chunk]);
+        const chunksArr = await md_store.find_chunks_by_dedup_key(bucket, []);
+        assert(Array.isArray(chunksArr));
+        assert(chunksArr.length === 0);
+    });
+
 });

