diff --git a/server/controllers/stock/core.js b/server/controllers/stock/core.js index 2037628f90..ffdabf532a 100644 --- a/server/controllers/stock/core.js +++ b/server/controllers/stock/core.js @@ -241,6 +241,13 @@ function getLots(sqlQuery, parameters, finalClause = '', orderBy = '') { filters.setOrder(orderBy); } + if (parameters.paging) { + const FROM_INDEX = String(sql).lastIndexOf('FROM'); + const select = String(sql).substring(0, FROM_INDEX - 1); + const tables = String(sql).substring(FROM_INDEX, sql.length - 1); + return db.paginateQuery(select, parameters, tables, filters); + } + const query = filters.applyQuery(sql); const queryParameters = filters.parameters(); @@ -333,7 +340,6 @@ async function getAssets(params) { params.scan_status === 'scanned' ? 'last_scan.uuid IS NOT NULL' : 'last_scan.uuid IS NULL'); } filters.setGroup(groupByClause); - filters.setHaving(havingClause); filters.setOrder('ORDER BY i.code, l.label'); const query = filters.applyQuery(sql); @@ -427,14 +433,22 @@ async function getLotsDepot(depotUuid, params, finalClause) { `; const groupByClause = finalClause || ` GROUP BY l.uuid, m.depot_uuid ${emptyLotToken} ORDER BY i.code, l.label `; - const filters = getLotFilters(params); filters.setGroup(groupByClause); - const query = filters.applyQuery(sql); - const queryParameters = filters.parameters(); - - const resultFromProcess = await db.exec(query, queryParameters); + let resultFromProcess; + let paginatedResults; + if (params.paging) { + const FROM_INDEX = String(sql).lastIndexOf('FROM'); + const select = String(sql).substring(0, FROM_INDEX - 1); + const tables = String(sql).substring(FROM_INDEX, sql.length - 1); + paginatedResults = await db.paginateQuery(select, params, tables, filters); + resultFromProcess = paginatedResults.rows; + } else { + const query = filters.applyQuery(sql); + const queryParameters = filters.parameters(); + resultFromProcess = await db.exec(query, queryParameters); + } // add minimum delay 
resultFromProcess.forEach(row => { @@ -466,6 +480,13 @@ async function getLotsDepot(depotUuid, params, finalClause) { inventoriesWithLotsProcessed = inventoriesWithLotsProcessed.filter(lot => !lot.near_expiration); } + if (params.paging) { + return { + pager : paginatedResults.pager, + rows : inventoriesWithLotsProcessed, + }; + } + return inventoriesWithLotsProcessed; } @@ -956,24 +977,28 @@ async function getInventoryQuantityAndConsumption(params) { const clause = ` GROUP BY l.inventory_uuid, m.depot_uuid ${emptyLotToken} ORDER BY ig.name, i.text `; - let filteredRows = await getLots(sql, params, clause); - if (filteredRows.length === 0) { return []; } + const filteredRows = await getLots(sql, params, clause); + let filteredRowsPaged = params.paging ? filteredRows.rows : filteredRows; + + if (filteredRowsPaged.length === 0) { + return params.paging ? { ...filteredRows } : []; + } const settingsql = ` SELECT month_average_consumption, average_consumption_algo, min_delay, default_purchase_interval FROM stock_setting WHERE enterprise_id = ? 
`; - const opts = await db.one(settingsql, filteredRows[0].enterprise_id); + const opts = await db.one(settingsql, filteredRowsPaged[0].enterprise_id); // add the minimum delay to the rows - filteredRows.forEach(row => { + filteredRowsPaged.forEach(row => { row.min_delay = opts.min_delay; }); // add the CMM - filteredRows = await getBulkInventoryCMM( - filteredRows, + filteredRowsPaged = await getBulkInventoryCMM( + filteredRowsPaged, opts.month_average_consumption, opts.average_consumption_algo, opts.default_purchase_interval, @@ -981,14 +1006,14 @@ async function getInventoryQuantityAndConsumption(params) { ); if (_status) { - filteredRows = filteredRows.filter(row => row.status === _status); + filteredRowsPaged = filteredRowsPaged.filter(row => row.status === _status); } if (requirePurchaseOrder) { - filteredRows = filteredRows.filter(row => row.S_Q > 0); + filteredRowsPaged = filteredRowsPaged.filter(row => row.S_Q > 0); } - return filteredRows; + return params.paging ? { ...filteredRows, rows : filteredRowsPaged } : filteredRowsPaged; } /** diff --git a/server/controllers/stock/index.js b/server/controllers/stock/index.js index 610bb3c2a7..1fd4e2e036 100644 --- a/server/controllers/stock/index.js +++ b/server/controllers/stock/index.js @@ -1099,10 +1099,14 @@ async function listLotsDepot(req, res, next) { // if no data is returned or if the skipTags flag is set, we don't need to do any processing // of tags. Skip the SQL query and JS loops. 
- if (data.length !== 0 && !params.skipTags) { + if (!params.paging && data.length !== 0 && !params.skipTags) { await core.addLotTags(data); } + if (params.paging && data.rows.length !== 0 && !params.skipTags) { + await core.addLotTags(data.rows); + } + res.status(200).json(data); } catch (error) { next(error); @@ -1169,18 +1173,21 @@ async function listLotsDepotDetailed(req, res, next) { db.exec(sqlGetMonthlyStockMovements, [db.bid(params.depot_uuid), params.startDate, params.dateTo]), ]); - data.forEach(current => { + const dataPaged = !params.paging ? data : data.rows; + const dataPagedPreviousMonth = !params.paging ? dataPreviousMonth : dataPreviousMonth.rows; + + (dataPaged || []).forEach(current => { current.quantity_opening = 0; current.total_quantity_entry = 0; current.total_quantity_exit = 0; - dataPreviousMonth.forEach(previous => { + (dataPagedPreviousMonth || []).forEach(previous => { if (current.uuid === previous.uuid) { current.quantity_opening = previous.quantity; } }); - dataStockMovements.forEach(row => { + (dataStockMovements || []).forEach(row => { if (current.uuid === row.lot_uuid) { current.total_quantity_entry = row.entry_quantity; current.total_quantity_exit = row.exit_quantity; @@ -1196,19 +1203,19 @@ async function listLotsDepotDetailed(req, res, next) { `; // if we have an empty set, do not query tags. - if (data.length !== 0) { - const lotUuids = data.map(row => db.bid(row.uuid)); + if (dataPaged.length !== 0) { + const lotUuids = dataPaged.map(row => db.bid(row.uuid)); const tags = await db.exec(queryTags, [lotUuids]); // make a lot_uuid -> tags map. const tagMap = _.groupBy(tags, 'lot_uuid'); - data.forEach(lot => { + dataPaged.forEach(lot => { lot.tags = tagMap[lot.uuid] || []; }); } - res.status(200).json(data); + res.status(200).json(params.paging ? 
{ pager : data.pager, rows : dataPaged } : dataPaged); } catch (error) { next(error); } diff --git a/server/lib/db/index.js b/server/lib/db/index.js index 6d62509b1a..a2c5612a1d 100644 --- a/server/lib/db/index.js +++ b/server/lib/db/index.js @@ -289,6 +289,57 @@ class DatabaseConnector { .catch(next) .done(); } + + async paginateQuery(sql, params, tables, filters) { + let pager = {}; + let rows = []; + let fetchAllData = false; + + if (!params.limit) { + params.limit = 100; + } else if (params.limit && parseInt(params.limit, 10) === -1) { + fetchAllData = true; + delete params.limit; + } + + if (params.page && parseInt(params.page, 10) === 0) { + delete params.page; + } + + const queryParameters = filters.parameters(); + + if (fetchAllData) { + // fetch all data + const query = filters.applyQuery(sql.concat(' ', tables)); + rows = await this.exec(query, queryParameters); + } else { + // paginated data + + // FIXME: Performance issue, use SQL COUNT in a better way + const total = (await this.exec(filters.getAllResultQuery(sql.concat(' ', tables)), queryParameters)).length; + const page = params.page ? parseInt(params.page, 10) : 1; + const limit = params.limit ? 
parseInt(params.limit, 10) : 100; + const pageCount = Math.ceil(total / limit); + pager = { + total, + page, + page_size : limit, + page_min : (page - 1) * limit, + page_max : (page) * limit, + page_count : pageCount, + }; + const paginatedQuery = filters.applyPaginationQuery(sql.concat(' ', tables), pager.page_size, pager.page_min); + rows = await this.exec(paginatedQuery, queryParameters); + if (rows.length === 0) { + // update page_min and page_max after the query + // in case of empty result + pager.page_min = null; + pager.page_max = null; + } + } + + return { rows, pager }; + } } module.exports = new DatabaseConnector(); diff --git a/server/lib/filter.js b/server/lib/filter.js index 42838f02c8..8a4e3afb1a 100644 --- a/server/lib/filter.js +++ b/server/lib/filter.js @@ -299,6 +299,55 @@ class FilterParser { return limitString; } + + /** + * pagination handler + */ + paginationLimitQuery(table, limit = 100, page = 1) { + if (this._autoParseStatements) { + this._parseDefaultFilters(); + } + + const conditionStatements = this._parseStatements(); + + return ` + SELECT + COUNT(*) AS total, + ${page} AS page, + ${limit} AS page_size, + (${(page - 1) * limit}) AS page_min, + (${(page) * limit}) AS page_max, + CEIL(COUNT(*) / ${limit}) AS page_count + ${table} + WHERE ${conditionStatements} + `; + } + + // FIXME: This strategy is a temporary solution to fix the pager.total compared to the rows.size + // The reason is we have to use COUNT(DISTINCT specific_column) FOR ALL OUR CASES in the above + // query + getAllResultQuery(sql) { + if (this._autoParseStatements) { + this._parseDefaultFilters(); + } + + const conditionStatements = this._parseStatements(); + const group = this._group; + + return `${sql} WHERE ${conditionStatements} ${group}`; + } + + applyPaginationQuery(sql, limit, page) { + if (this._autoParseStatements) { + this._parseDefaultFilters(); + } + + const conditionStatements = this._parseStatements(); + const order = this._order; + const group = this._group; + + 
return `${sql} WHERE ${conditionStatements} ${group} ${order} LIMIT ${limit} OFFSET ${page}`; + } } module.exports = FilterParser;