From 5d67c7713dde3ecde35c9cf74586c3e5e7477802 Mon Sep 17 00:00:00 2001
From: Ludek Novy <13610612+ludeknovy@users.noreply.github.com>
Date: Wed, 5 Apr 2023 22:33:31 +0200
Subject: [PATCH] thresholds reworked (#213)

---
 .../item/shared/item-data-processing.ts    |  15 +-
 .../item/update-item-controller.ts         |  68 +++-
 .../utils/scenario-thresholds-calc.spec.ts | 329 ++++++++++++------
 .../item/utils/scenario-thresholds-calc.ts | 104 +++---
 .../scenario/get-scenario-controller.ts    |  73 ++--
 src/server/data-stats/prepare-data.ts      |  32 +-
 src/server/queries/items.ts                |  38 ++
 7 files changed, 464 insertions(+), 195 deletions(-)

diff --git a/src/server/controllers/item/shared/item-data-processing.ts b/src/server/controllers/item/shared/item-data-processing.ts
index 9a89dcf5..6a390aff 100644
--- a/src/server/controllers/item/shared/item-data-processing.ts
+++ b/src/server/controllers/item/shared/item-data-processing.ts
@@ -23,10 +23,10 @@ import {
     calculateApdexValues,
     updateItemApdexSettings,
     chartOverviewStatusCodesQuery,
-    responseTimePerLabelHistogram, findRawData,
+    responseTimePerLabelHistogram, findRawData, getBaselineItemWithStats,
 } from "../../../queries/items"
 import { ReportStatus } from "../../../queries/items.model"
-import { getScenarioSettings, currentScenarioMetrics } from "../../../queries/scenario"
+import { getScenarioSettings } from "../../../queries/scenario"
 import { sendNotifications } from "../../../utils/notifications/send-notification"
 import { scenarioThresholdsCalc } from "../utils/scenario-thresholds-calc"
 import { extraIntervalMilliseconds } from "./extra-intervals-mapping"
@@ -122,10 +122,13 @@ export const itemDataProcessing = async ({ projectName, scenarioName, itemId })
     overview.maxVu = Math.max(...chartData.threads.map(([, vu]) => vu))

     if (scenarioSettings.thresholdEnabled) {
-        const scenarioMetrics = await db.one(currentScenarioMetrics(projectName, scenarioName, overview.maxVu))
-        const thresholdResult = scenarioThresholdsCalc(overview, scenarioMetrics, scenarioSettings)
-        if (thresholdResult) {
-            await db.none(saveThresholdsResult(projectName, scenarioName, itemId, thresholdResult))
+        logger.info("threshold comparison enabled, fetching baseline report")
+        const baselineReport = await db.oneOrNone(getBaselineItemWithStats(projectName, scenarioName))
+        if (baselineReport) {
+            const thresholdResult = scenarioThresholdsCalc(labelStats, baselineReport.stats, scenarioSettings)
+            if (thresholdResult) {
+                await db.none(saveThresholdsResult(projectName, scenarioName, itemId, thresholdResult))
+            }
         }
     }
diff --git a/src/server/controllers/item/update-item-controller.ts b/src/server/controllers/item/update-item-controller.ts
index e2b6f430..fa7c302f 100644
--- a/src/server/controllers/item/update-item-controller.ts
+++ b/src/server/controllers/item/update-item-controller.ts
@@ -1,23 +1,61 @@
 import { Request, Response, NextFunction } from "express"
-import { updateTestItemInfo, removeCurrentBaseFlag, setBaseFlag } from "../../queries/items"
+import {
+    updateTestItemInfo,
+    removeCurrentBaseFlag,
+    setBaseFlag,
+    findItemStats, saveThresholdsResult, findItemsWithThresholds,
+} from "../../queries/items"
 import { db } from "../../../db/db"
 import { StatusCode } from "../../utils/status-code"
+import { scenarioThresholdsCalc } from "./utils/scenario-thresholds-calc"
+import { getScenarioSettings } from "../../queries/scenario"
+import { logger } from "../../../logger"

 export const updateItemController = async (req: Request, res: Response, next: NextFunction) => {
-    const { projectName, scenarioName, itemId } = req.params
-    const { note, environment, hostname, base, name } = req.body
-    try {
-        await db.query("BEGIN")
-        await db.none(updateTestItemInfo(itemId, scenarioName, projectName, note, environment, hostname, name))
-        if (base) {
-            await db.none(removeCurrentBaseFlag(scenarioName, projectName))
-            await db.none(setBaseFlag(itemId, scenarioName, projectName))
+    const { projectName, scenarioName, itemId } = req.params
+    const { note, environment, hostname, base, name } = req.body
+    try {
+        await db.query("BEGIN")
+        await db.none(updateTestItemInfo(itemId, scenarioName, projectName, note, environment, hostname, name))
+        if (base) {
+            await db.none(removeCurrentBaseFlag(scenarioName, projectName))
+            await db.none(setBaseFlag(itemId, scenarioName, projectName))
+            const scenarioSettings = await db.one(getScenarioSettings(projectName, scenarioName))
+            if (scenarioSettings.thresholdEnabled) {
+                logger.info("Thresholds enabled, searching for any connected reports")
+                // items
+                const itemsWithThresholds = await db.manyOrNone(findItemsWithThresholds(projectName, scenarioName))
+
+                if (itemsWithThresholds && itemsWithThresholds.length > 0) {
+                    logger.info("Items with thresholds that need to be updated found")
+                    const { stats: newBaseLineReport } = await db.one(findItemStats(itemId))
+
+                    // eslint-disable-next-line max-depth
+                    for (const itemIdToBeRecalculated of itemsWithThresholds) {
+                        logger.info(`About to re-calculate threshold for item: ${itemIdToBeRecalculated.id}`)
+
+                        const { stats: statsToBeRecalculatedItem } = await db.one(
+                            findItemStats(itemIdToBeRecalculated.id))
+
+                        const updatedThreshold = scenarioThresholdsCalc(statsToBeRecalculatedItem,
+                            newBaseLineReport, scenarioSettings)
+                        // eslint-disable-next-line max-depth
+                        if (updatedThreshold) {
+                            // eslint-disable-next-line max-len
+                            logger.info(`About to save re-calculated threshold values for item: ${itemIdToBeRecalculated.id}`)
+                            await db.none(saveThresholdsResult(projectName, scenarioName,
+                                itemIdToBeRecalculated.id, updatedThreshold))
+                        }
+                    }
+                }
+            }
+
+        }
+        await db.query("COMMIT")
+        res.status(StatusCode.NoContent).send()
+    } catch(error) {
+        await db.query("ROLLBACK")
+        return next(error)
     }
-        await db.query("COMMIT")
-        res.status(StatusCode.NoContent).send()
-    } catch(error) {
-        await db.query("ROLLBACK")
-        return next(error)
-    }
 }
diff --git a/src/server/controllers/item/utils/scenario-thresholds-calc.spec.ts b/src/server/controllers/item/utils/scenario-thresholds-calc.spec.ts
index 3b1cd4b4..cf9dedda 100644
--- a/src/server/controllers/item/utils/scenario-thresholds-calc.spec.ts
+++ b/src/server/controllers/item/utils/scenario-thresholds-calc.spec.ts
@@ -1,107 +1,238 @@
 import { scenarioThresholdsCalc } from "./scenario-thresholds-calc"
+import { LabelStats } from "../../../data-stats/prepare-data"

 describe("scenarioThresholdsCalc", () => {
-    const SCENARIO_AVERAGE = {
-        percentile: "35.33",
-        throughput: "462.39",
-        errorRate: "0.0",
-    }
-    it("should return false, when response time threshold exceeded", () => {
-        const output = scenarioThresholdsCalc({
-            percentil: 40,
-            errorRate: 0.0,
-            throughput: 462.39,
-        } as any, SCENARIO_AVERAGE, {
-            errorRate: "5", percentile: "5", throughput: "5",
-        })
-        expect(output.passed).toBe(false)
-        expect(output.result.percentile.passed).toBe(false)
-    })
-    it("should return true, when response time threshold not exceeded", () => {
-        const output = scenarioThresholdsCalc({
-            percentil: 36,
-            errorRate: 0.0,
-            throughput: 462.39,
-        } as any, SCENARIO_AVERAGE, {
{ - errorRate: "5", percentile: "5", throughput: "5", - }) - expect(output.passed).toBe(true) - expect(output.result.percentile.passed).toBe(true) - - }) - it("should return false, when throughput threshold exceeded", () => { - const output = scenarioThresholdsCalc({ - percentil: 35, - errorRate: 0.0, - throughput: 435, - } as any, SCENARIO_AVERAGE, { - errorRate: "5", percentile: "5", throughput: "5", - }) - expect(output.passed).toBe(false) - expect(output.result.throughput.passed).toBe(false) - }) - it("should return true, when throughput threshold not exceeded", () => { - const output = scenarioThresholdsCalc({ - percentil: 35, - errorRate: 0.0, - throughput: 450, - } as any, SCENARIO_AVERAGE, { - errorRate: "5", percentile: "5", throughput: "5", - }) - expect(output.passed).toBe(true) - expect(output.result.throughput.passed).toBe(true) - - }) - it("should return false, when errorRate threshold exceeded and scenario average is zero", () => { - const output = scenarioThresholdsCalc({ - percentil: 35, - errorRate: 10.0, - throughput: 462.39, - } as any, SCENARIO_AVERAGE, { - errorRate: "5", percentile: "5", throughput: "5", - }) - expect(output.passed).toBe(false) - expect(output.result.errorRate.passed).toBe(false) - }) - it("should return false, when errorRate threshold exceeded and scenario average is not zero", () => { - const output = scenarioThresholdsCalc({ - percentil: 35, - errorRate: 10.0, - throughput: 462.39, - } as any, { - percentile: "35.33", - throughput: "462.39", - errorRate: "1.0", + const BASELINE_REPORT_STATS: LabelStats[] = [{ + label: "label1", + n0: 70, + n5: 100, + n9: 100, + samples: 100, + latency: 2, + connect: 4, + standardDeviation: 3.3, + bytesPerSecond: 100, + bytesSentPerSecond: 100, + statusCodes: [{ statusCode: "200", count: 100 }], + throughput: 200, + errorRate: 0.0, + avgResponseTime: 100, + minResponseTime: 1, + maxResponseTime: 100, + medianResponseTime: 40, + responseMessageFailures: null, + apdex: {}, + }, { + label: "label2", + n0: 30, + n5: 100, + n9: 88, + samples: 100, + latency: 2, + connect: 4, + standardDeviation: 3.3, + bytesPerSecond: 100, + bytesSentPerSecond: 100, + statusCodes: [{ statusCode: "200", count: 100 }], + throughput: 200, + errorRate: 10.0, + avgResponseTime: 100, + minResponseTime: 1, + maxResponseTime: 100, + medianResponseTime: 40, + responseMessageFailures: null, + apdex: {}, }, { - errorRate: "5", percentile: "5", throughput: "5", + label: "label3", + n0: 100, + n5: 100, + n9: 88, + samples: 100, + latency: 2, + connect: 4, + standardDeviation: 3.3, + bytesPerSecond: 100, + bytesSentPerSecond: 100, + statusCodes: [{ statusCode: "200", count: 100 }], + throughput: 100, + errorRate: 0.0, + avgResponseTime: 100, + minResponseTime: 1, + maxResponseTime: 100, + medianResponseTime: 40, + responseMessageFailures: null, + apdex: {}, + }, + ] + it("should return correct values for thresholds", () => { + const labelData: LabelStats[] = [{ + label: "label1", + n0: 100, + n5: 100, + n9: 100, + samples: 100, + latency: 2, + connect: 4, + standardDeviation: 3.3, + bytesPerSecond: 100, + bytesSentPerSecond: 100, + statusCodes: [{ statusCode: "200", count: 100 }], + throughput: 100, + errorRate: 10.0, + avgResponseTime: 100, + minResponseTime: 1, + maxResponseTime: 100, + medianResponseTime: 40, + responseMessageFailures: null, + apdex: {}, + }, { + label: "label2", + n0: 100, + n5: 100, + n9: 88, + samples: 100, + latency: 2, + connect: 4, + standardDeviation: 3.3, + bytesPerSecond: 100, + bytesSentPerSecond: 100, + statusCodes: [{ 
statusCode: "200", count: 100 }], + throughput: 88, + errorRate: 5.0, + avgResponseTime: 100, + minResponseTime: 1, + maxResponseTime: 100, + medianResponseTime: 40, + responseMessageFailures: null, + apdex: {}, + }, { + label: "label3", + n0: 95, + n5: 100, + n9: 88, + samples: 100, + latency: 2, + connect: 4, + standardDeviation: 3.3, + bytesPerSecond: 100, + bytesSentPerSecond: 100, + statusCodes: [{ statusCode: "200", count: 100 }], + throughput: 100, + errorRate: 0.0, + avgResponseTime: 100, + minResponseTime: 1, + maxResponseTime: 100, + medianResponseTime: 40, + responseMessageFailures: null, + apdex: {}, + }] + const output = scenarioThresholdsCalc(labelData, BASELINE_REPORT_STATS, { + errorRate: "5", percentile: "5", throughput: "5", + }) + expect(output.passed).toBe(false) + expect(output.results).toEqual([{ + label: "label1", passed: false, result: { + errorRate: { passed: false, diffValue: 110 }, + percentile: { passed: false, diffValue: 142.85714285714286 }, + throughput: { passed: false, diffValue: 50 }, + }, + }, { + label: "label2", passed: false, result: { + errorRate: { passed: true, diffValue: 50 }, + percentile: { passed: false, diffValue: 333.33333333333337 }, + throughput: { passed: false, diffValue: 44 }, + }, + }, { + label: "label3", passed: true, result: { + errorRate: { passed: true, diffValue: 100 }, + percentile: { passed: true, diffValue: 95 }, + throughput: { passed: true, diffValue: 100 }, + }, + }, + ]) }) - expect(output.passed).toBe(false) - expect(output.result.errorRate.passed).toBe(false) - }) - it("should return true, when errorRate threshold not exceeded and scenario average is zero", () => { - const output = scenarioThresholdsCalc({ - percentil: 35, - errorRate: 4.0, - throughput: 462.39, - } as any, SCENARIO_AVERAGE, { - errorRate: "5", percentile: "5", throughput: "5", + it("should return undefined when no scenario settings provided", function () { + const labelData: LabelStats[] = [{ + label: "label1", + n0: 100, + n5: 100, + n9: 100, + samples: 100, + latency: 2, + connect: 4, + standardDeviation: 3.3, + bytesPerSecond: 100, + bytesSentPerSecond: 100, + statusCodes: [{ statusCode: "200", count: 100 }], + throughput: 100, + errorRate: 10.0, + avgResponseTime: 100, + minResponseTime: 1, + maxResponseTime: 100, + medianResponseTime: 40, + responseMessageFailures: null, + apdex: {}, + }, { + label: "label2", + n0: 100, + n5: 100, + n9: 88, + samples: 100, + latency: 2, + connect: 4, + standardDeviation: 3.3, + bytesPerSecond: 100, + bytesSentPerSecond: 100, + statusCodes: [{ statusCode: "200", count: 100 }], + throughput: 88, + errorRate: 0.0, + avgResponseTime: 100, + minResponseTime: 1, + maxResponseTime: 100, + medianResponseTime: 40, + responseMessageFailures: null, + apdex: {}, + }] + const output = scenarioThresholdsCalc(labelData, BASELINE_REPORT_STATS, {}) + expect(output).not.toBeDefined() }) - expect(output.passed).toBe(true) - expect(output.result.errorRate.passed).toBe(true) - }) - it("should return true, when errorRate threshold not exceeded and scenario average is not zero", () => { - const output = scenarioThresholdsCalc({ - percentil: 35, - errorRate: 0.0, - throughput: 462.39, - } as any, { - percentile: "35.33", - throughput: "462.39", - errorRate: "1.0", - }, { - errorRate: "5", percentile: "5", throughput: "5", + it("should return default values when label not found it baseline report", function () { + const labelData: LabelStats[] = [{ + label: "labelNotFound", + n0: 100, + n5: 100, + n9: 100, + samples: 100, + latency: 2, + 
+            connect: 4,
+            standardDeviation: 3.3,
+            bytesPerSecond: 100,
+            bytesSentPerSecond: 100,
+            statusCodes: [{ statusCode: "200", count: 100 }],
+            throughput: 100,
+            errorRate: 10.0,
+            avgResponseTime: 100,
+            minResponseTime: 1,
+            maxResponseTime: 100,
+            medianResponseTime: 40,
+            responseMessageFailures: null,
+            apdex: {},
+        }]
+        const output = scenarioThresholdsCalc(labelData, BASELINE_REPORT_STATS, {
+            errorRate: "5", percentile: "5", throughput: "5",
+        })
+        expect(output).toEqual({
+            passed: true,
+            results: [{
+                label: "labelNotFound", passed: true, result: {
+                    errorRate: { diffValue: null, passed: null },
+                    percentile: { diffValue: null, passed: null },
+                    throughput: { diffValue: null, passed: null },
+                },
+            }],
+            thresholds: { errorRate: "5", percentile: "5", throughput: "5" },
+        }
+        )
+    })
-        expect(output.passed).toBe(true)
-        expect(output.result.errorRate.passed).toBe(true)
-    })
 })
diff --git a/src/server/controllers/item/utils/scenario-thresholds-calc.ts b/src/server/controllers/item/utils/scenario-thresholds-calc.ts
index 94173d8b..573bf8d6 100644
--- a/src/server/controllers/item/utils/scenario-thresholds-calc.ts
+++ b/src/server/controllers/item/utils/scenario-thresholds-calc.ts
@@ -1,47 +1,73 @@
-import { Overview } from "../../../data-stats/prepare-data"
+import { LabelStats } from "../../../data-stats/prepare-data"
 import { divide } from "mathjs"

 const PERC = 100

 // eslint-disable-next-line max-len
-export const scenarioThresholdsCalc = (overviewData: Overview, scenarioMetrics: Thresholds, thresholds: Thresholds) => {
-    if (!scenarioMetrics.errorRate || !scenarioMetrics.percentile || !scenarioMetrics.throughput) {
-        return undefined
-    }
-    const percentileDiff = (overviewData.percentil / parseFloat(scenarioMetrics.percentile)) * PERC
-    const throughputDiff = (overviewData.throughput / parseFloat(scenarioMetrics.throughput)) * PERC
-    const errorRateDiff = parseFloat(scenarioMetrics.errorRate) === 0
-        ? PERC + overviewData.errorRate
-        : divide(overviewData.errorRate, scenarioMetrics.errorRate as unknown as number) * PERC
+export const scenarioThresholdsCalc = (labelStats: LabelStats[], baselineReportStats: LabelStats[], scenarioSettings) => {
+    const results = []
+    if (!scenarioSettings.errorRate || !scenarioSettings.percentile || !scenarioSettings.throughput) {
+        return undefined
+    }

-    const percentilePass = percentileDiff < (PERC + parseFloat(thresholds.percentile))
-    const errorRatePass = errorRateDiff < (PERC + parseFloat(thresholds.errorRate))
-    const throughputPass = throughputDiff >= (PERC - parseFloat(thresholds.throughput))
-
-    return {
-        passed: percentilePass && throughputPass && errorRatePass,
-        result: {
-            percentile: {
-                passed: percentilePass,
-                diffValue: percentileDiff,
-            },
-            throughput: {
-                passed: throughputPass,
-                diffValue: throughputDiff,
-            },
-            errorRate: {
-                passed: errorRatePass,
-                diffValue: errorRateDiff,
-            },
-        },
-        scenarioMetrics,
-        thresholds,
-    }
+    labelStats.forEach(value => {
+        const baselineLabelStats = baselineReportStats.find(baselineValue => baselineValue.label === value.label)
+        if (baselineLabelStats) {
+            const percentileDiff = (value.n0 / baselineLabelStats.n0) * PERC
+            const throughputDiff = (value.throughput / baselineLabelStats.throughput) * PERC
+            const errorRateDiff = baselineLabelStats.errorRate === 0
+                ? PERC + value.errorRate
+                : divide(value.errorRate, baselineLabelStats.errorRate as unknown as number) * PERC
+            const percentilePass = percentileDiff < (PERC + parseFloat(scenarioSettings.percentile))
+            const errorRatePass = errorRateDiff < (PERC + parseFloat(scenarioSettings.errorRate))
+            const throughputPass = throughputDiff >= (PERC - parseFloat(scenarioSettings.throughput))
+            results.push({
+                passed: percentilePass && throughputPass && errorRatePass,
+                label: value.label,
+                result: {
+                    percentile: {
+                        passed: percentilePass,
+                        diffValue: percentileDiff,
+                    },
+                    throughput: {
+                        passed: throughputPass,
+                        diffValue: throughputDiff,
+                    },
+                    errorRate: {
+                        passed: errorRatePass,
+                        diffValue: errorRateDiff,
+                    },
+                },
+            })
+        } else {
+            results.push({
+                passed: true,
+                label: value.label,
+                result: {
+                    percentile: {
+                        passed: null,
+                        diffValue: null,
+                    },
+                    throughput: {
+                        passed: null,
+                        diffValue: null,
+                    },
+                    errorRate: {
+                        passed: null,
+                        diffValue: null,
+                    },
+                },
+            })
+        }
+    })
+    return {
+        passed: results.every(result => result.passed),
+        results,
+        thresholds: {
+            errorRate: scenarioSettings.errorRate,
+            throughput: scenarioSettings.throughput,
+            percentile: scenarioSettings.percentile,
+        },
+    }
 }
-
-interface Thresholds<T> {
-    percentile: T
-    throughput: T
-    errorRate: T
-}
diff --git a/src/server/controllers/scenario/get-scenario-controller.ts b/src/server/controllers/scenario/get-scenario-controller.ts
index 034a5e6b..bf0c7226 100644
--- a/src/server/controllers/scenario/get-scenario-controller.ts
+++ b/src/server/controllers/scenario/get-scenario-controller.ts
@@ -3,44 +3,47 @@
 import { db } from "../../../db/db"
 import { getScenario, getUserScenarioSettings } from "../../queries/scenario"
 import { StatusCode } from "../../utils/status-code"
 import { IGetUserAuthInfoRequest } from "../../middleware/request.model"
+import { getBaselineItem } from "../../queries/items"

 const defaultRequestStatsSettings = {
-    samples: true,
-    avg: true,
-    min: true,
-    max: true,
-    p90: true,
-    p95: true,
-    p99: true,
-    throughput: true,
-    network: true,
-    errorRate: true,
+    samples: true,
+    avg: true,
+    min: true,
+    max: true,
+    p90: true,
+    p95: true,
+    p99: true,
+    throughput: true,
+    network: true,
+    errorRate: true,
 }

 export const getScenarioController = async (req: IGetUserAuthInfoRequest, res: Response) => {
-    const { projectName, scenarioName } = req.params
-    const { userId } = req.user
-    const scenario = await db.oneOrNone(getScenario(projectName, scenarioName))
-    const userScenarioSettings = await db.oneOrNone(getUserScenarioSettings(projectName, scenarioName, userId))
-    res.status(StatusCode.Ok).json({
-        name: scenario.name,
-        analysisEnabled: scenario.analysis_enabled,
-        zeroErrorToleranceEnabled: scenario.zero_error_tolerance_enabled,
-        deleteSamples: scenario.delete_samples,
-        keepTestRunsPeriod: scenario.keep_test_runs_period,
-        generateShareToken: scenario.generate_share_token,
-        extraAggregations: scenario.extra_aggregations,
-        thresholds: {
-            enabled: scenario.threshold_enabled,
-            percentile: scenario.threshold_percentile,
-            throughput: scenario.threshold_throughput,
-            errorRate: scenario.threshold_error_rate,
-        },
-        labelFilterSettings: scenario.label_filter_settings,
-        labelTrendChartSettings: scenario.label_trend_chart_settings,
-        userSettings: {
-            requestStats: userScenarioSettings?.request_stats_settings || defaultRequestStatsSettings,
-        },
-        apdexSettings: scenario.apdex_settings,
-    })
+    const { projectName, scenarioName } = req.params
+    const { userId } = req.user
+    const scenario = await db.oneOrNone(getScenario(projectName, scenarioName))
+    const baseline = await db.oneOrNone(getBaselineItem(projectName, scenarioName))
+    const userScenarioSettings = await db.oneOrNone(getUserScenarioSettings(projectName, scenarioName, userId))
+    res.status(StatusCode.Ok).json({
+        name: scenario.name,
+        analysisEnabled: scenario.analysis_enabled,
+        zeroErrorToleranceEnabled: scenario.zero_error_tolerance_enabled,
+        deleteSamples: scenario.delete_samples,
+        keepTestRunsPeriod: scenario.keep_test_runs_period,
+        generateShareToken: scenario.generate_share_token,
+        extraAggregations: scenario.extra_aggregations,
+        thresholds: {
+            enabled: scenario.threshold_enabled,
+            percentile: scenario.threshold_percentile,
+            throughput: scenario.threshold_throughput,
+            errorRate: scenario.threshold_error_rate,
+        },
+        labelFilterSettings: scenario.label_filter_settings,
+        labelTrendChartSettings: scenario.label_trend_chart_settings,
+        userSettings: {
+            requestStats: userScenarioSettings?.request_stats_settings || defaultRequestStatsSettings,
+        },
+        apdexSettings: scenario.apdex_settings,
+        baselineReport: baseline?.id || null,
+    })
 }
diff --git a/src/server/data-stats/prepare-data.ts b/src/server/data-stats/prepare-data.ts
index 6d88bacf..f80eaf51 100644
--- a/src/server/data-stats/prepare-data.ts
+++ b/src/server/data-stats/prepare-data.ts
@@ -6,7 +6,7 @@ import { shouldSkipLabel } from "../controllers/item/utils/labelFilter"

 // eslint-disable-next-line max-len
 export const prepareDataForSavingToDb = (overviewData, labelData, sutStats, statusCodeDistr: StatusCodeDistribution[], responseFailures: ResponseMessageFailures[], apdex: Apdex[]):
-    { overview: Overview; labelStats; sutOverview: Array> } => {
+    { overview: Overview; labelStats: LabelStats[]; sutOverview: Array> } => {
     try {
         const startDate = new Date(overviewData.start)
         const endDate = new Date(overviewData.end)
@@ -433,3 +433,33 @@ interface ResponseTimeHistogram {
     histogram: number[]
     label: string
 }
+
+export interface LabelStats {
+    label: string
+    samples: number
+    avgResponseTime: number
+    medianResponseTime: number
+    latency: number
+    connect: number
+    standardDeviation: number
+    minResponseTime: number
+    maxResponseTime: number
+    errorRate: number
+    bytesPerSecond: number
+    bytesSentPerSecond: number
+    throughput: number
+    n9: number
+    n5: number
+    n0: number
+    statusCodes: [{ statusCode: string; count: number }]
+    responseMessageFailures: [{
+        responseMessage: string
+        count: number
+        statusCode: string
+        failureMessage: string
+    }]
+    apdex: {
+        toleration?: number
+        satisfaction?: number
+    }
+}
diff --git a/src/server/queries/items.ts b/src/server/queries/items.ts
index 7b65fafa..4463c4ee 100644
--- a/src/server/queries/items.ts
+++ b/src/server/queries/items.ts
@@ -346,6 +346,7 @@ export const charLabelQuery = (interval, item_id) => {
     percentile_cont(0.95) within group (order by (samples.elapsed))::real as n95,
     percentile_cont(0.90) within group (order by (samples.elapsed))::real as n90,
     percentile_cont(0.50) within group (order by (samples.elapsed))::real as n50,
+    ROUND(STDDEV(samples.elapsed), 2)::real AS standard_deviation,
     MIN(samples.elapsed)::real as min_response,
     MAX(samples.elapsed)::real as max_response,
     EXTRACT(EPOCH FROM (MAX(samples.timestamp) - MIN(samples.timestamp))) as interval,
@@ -559,3 +560,40 @@ export const updateItemStatus = (itemId, status) => {
         values: [itemId, status],
     }
 }
+
+export const getBaselineItem = (projectName, scenarioName) => {
+    return {
+        text: `SELECT it.id FROM jtl.items as it
+            LEFT JOIN jtl.scenario as s ON s.id = it.scenario_id
+            LEFT JOIN jtl.projects as p ON p.id = s.project_id
+            WHERE s.name = $2
+            AND p.project_name = $1
+            AND it.base is true`,
+        values: [projectName, scenarioName],
+    }
+}
+
+export const getBaselineItemWithStats = (projectName, scenarioName) => {
+    return {
+        text: `SELECT st.stats FROM jtl.items as it
+            LEFT JOIN jtl.item_stat as st ON it.id = st.item_id
+            LEFT JOIN jtl.scenario as s ON s.id = it.scenario_id
+            LEFT JOIN jtl.projects as p ON p.id = s.project_id
+            WHERE s.name = $2
+            AND p.project_name = $1
+            AND it.base is true`,
+        values: [projectName, scenarioName],
+    }
+}
+
+export const findItemsWithThresholds = (projectName, scenarioName) => {
+    return {
+        text: `SELECT it.id FROM jtl.items as it
+            LEFT JOIN jtl.scenario as s ON s.id = it.scenario_id
+            LEFT JOIN jtl.projects as p ON p.id = s.project_id
+            WHERE p.project_name = $1
+            AND s.name = $2
+            AND it.threshold_result is not null`,
+        values: [projectName, scenarioName],
+    }
+}
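
Illustration (not part of the patch): the reworked scenarioThresholdsCalc compares every label of the current report against the same label in the baseline report and expresses each metric as a percentage of the baseline value. Below is a minimal standalone TypeScript sketch of that per-label math, using the "label1" numbers from the spec above; the local names current, baseline and settings exist only for this example.

// Same formulas as the patched scenario-thresholds-calc.ts, shown for a single label.
// current mirrors "label1" from labelData, baseline mirrors "label1" from BASELINE_REPORT_STATS.
const PERC = 100
const current = { n0: 100, throughput: 100, errorRate: 10.0 }
const baseline = { n0: 70, throughput: 200, errorRate: 0.0 }
const settings = { percentile: "5", throughput: "5", errorRate: "5" }

// Each diff is the current value expressed as a percentage of the baseline value.
const percentileDiff = (current.n0 / baseline.n0) * PERC                    // ~142.86
const throughputDiff = (current.throughput / baseline.throughput) * PERC    // 50
const errorRateDiff = baseline.errorRate === 0
    ? PERC + current.errorRate                                              // 110
    : (current.errorRate / baseline.errorRate) * PERC

// Response time and error rate may exceed the baseline by at most the configured
// percentage; throughput may drop below the baseline by at most that percentage.
const percentilePass = percentileDiff < (PERC + parseFloat(settings.percentile))   // false
const errorRatePass = errorRateDiff < (PERC + parseFloat(settings.errorRate))      // false
const throughputPass = throughputDiff >= (PERC - parseFloat(settings.throughput))  // false

console.log({ percentileDiff, throughputDiff, errorRateDiff,
    passed: percentilePass && errorRatePass && throughputPass })                    // passed: false

These are the diffValue and passed fields asserted for "label1" in the "should return correct values for thresholds" spec.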