From f8cb7cca8b69b3ec831ec595900ecb316860fe2e Mon Sep 17 00:00:00 2001
From: Mike Shi
Date: Sun, 13 Apr 2025 19:39:59 -0700
Subject: [PATCH 1/3] feat: denoise search results

---
 packages/app/src/DBSearchPage.tsx             |  55 ++--
 packages/app/src/components/DBRowTable.tsx    | 130 +++++++--
 .../src/components/DBSearchPageFilters.tsx    |  24 ++
 packages/app/src/components/PatternTable.tsx  | 223 +-------------
 packages/app/src/hooks/usePatterns.tsx        | 275 ++++++++++++++++++
 packages/app/src/hooks/useRowWhere.tsx        |   6 +-
 6 files changed, 466 insertions(+), 247 deletions(-)
 create mode 100644 packages/app/src/hooks/usePatterns.tsx

diff --git a/packages/app/src/DBSearchPage.tsx b/packages/app/src/DBSearchPage.tsx
index d22c03dce..cf186bccb 100644
--- a/packages/app/src/DBSearchPage.tsx
+++ b/packages/app/src/DBSearchPage.tsx
@@ -503,6 +503,18 @@ function DBSearchPage() {
     }
   }, [analysisMode, setIsLive]);

+  const [denoiseResults, _setDenoiseResults] = useQueryState(
+    'denoise',
+    parseAsBoolean.withDefault(false),
+  );
+  const setDenoiseResults = useCallback(
+    (value: boolean) => {
+      setIsLive(false);
+      _setDenoiseResults(value);
+    },
+    [setIsLive, _setDenoiseResults],
+  );
+
   const {
     control,
     watch,
@@ -719,11 +731,11 @@ function DBSearchPage() {
   const onTableScroll = useCallback(
     (scrollTop: number) => {
       // If the user scrolls a bit down, kick out of live mode
-      if (scrollTop > 16) {
+      if (scrollTop > 16 && isLive) {
        setIsLive(false);
      }
    },
-    [setIsLive],
+    [isLive, setIsLive],
  );

   const onRowExpandClick = useCallback(
@@ -1245,6 +1257,8 @@ function DBSearchPage() {
            >
          ) : (
            <>
-             {shouldShowLiveModeHint && analysisMode === 'results' && (
+             {shouldShowLiveModeHint &&
+               analysisMode === 'results' &&
+               denoiseResults != true && (
-
+
+
-
-             )}
+             )}
           {chartConfig && dbSqlRowTableConfig && analysisMode === 'results' && (
@@ -1563,6 +1583,7 @@
                   queryKeyPrefix={QUERY_KEY_PREFIX}
                   onScroll={onTableScroll}
                   onError={handleTableError}
+                  denoiseResults={denoiseResults}
                 />
               )}
diff --git a/packages/app/src/components/DBRowTable.tsx b/packages/app/src/components/DBRowTable.tsx
index 9243555ac..9266f162d 100644
--- a/packages/app/src/components/DBRowTable.tsx
+++ b/packages/app/src/components/DBRowTable.tsx
@@ -27,7 +27,13 @@ import {
 } from '@hyperdx/common-utils/dist/types';
 import { splitAndTrimWithBracket } from '@hyperdx/common-utils/dist/utils';
 import { Box, Code, Flex, Text } from '@mantine/core';
-import { FetchNextPageOptions } from '@tanstack/react-query';
+import {
+  FetchNextPageOptions,
+  keepPreviousData,
+  QueryClient,
+  useQuery,
+  useQueryClient,
+} from '@tanstack/react-query';
 import {
   ColumnDef,
   ColumnResizeMode,
@@ -41,6 +47,7 @@ import { useVirtualizer } from '@tanstack/react-virtual';

 import { useTableMetadata } from '@/hooks/useMetadata';
 import useOffsetPaginatedQuery from '@/hooks/useOffsetPaginatedQuery';
+import { useGroupedPatterns } from '@/hooks/usePatterns';
 import useRowWhere from '@/hooks/useRowWhere';
 import { UNDEFINED_WIDTH } from '@/tableUtils';
 import { FormatTime } from '@/useFormatTime';
@@ -900,6 +907,7 @@ export function DBSqlRowTable({
   isLive = false,
   queryKeyPrefix,
   onScroll,
+  denoiseResults = false,
 }: {
   config: ChartConfigWithDateRange;
   onRowExpandClick?: (where: string) => void;
@@ -909,6 +917,7 @@ export function DBSqlRowTable({
   isLive?: boolean;
   onScroll?: (scrollTop: number) => void;
   onError?: (error: Error | ClickHouseQueryError) => void;
+  denoiseResults?: boolean;
 }) {
   const mergedConfig = useConfigWithPrimaryAndPartitionKey(config);

@@ -964,7 +973,7 @@ export function DBSqlRowTable({
       });
       return newRow;
     });
-  }, [data, objectTypeColumns]);
+  }, [data, objectTypeColumns, columnMap]);

   const aliasMap = chSqlToAliasMap(data?.chSql ?? { sql: '', params: {} });

@@ -983,23 +992,106 @@ export function DBSqlRowTable({
     }
   }, [isError, onError, error]);

+  const patternColumn = columns[columns.length - 1];
+  const groupedPatterns = useGroupedPatterns({
+    config,
+    samples: 10_000,
+    bodyValueExpression: patternColumn ?? '',
+    totalCount: undefined,
+    enabled: denoiseResults,
+  });
+  const noisyPatterns = useQuery({
+    queryKey: ['noisy-patterns', config],
+    queryFn: async () => {
+      return Object.values(groupedPatterns.data).filter(
+        p => p.count / (groupedPatterns.sampledRowCount ?? 1) > 0.1,
+      );
+    },
+    enabled:
+      denoiseResults &&
+      groupedPatterns.data != null &&
+      Object.values(groupedPatterns.data).length > 0 &&
+      groupedPatterns.miner != null,
+  });
+  const noisyPatternIds = useMemo(() => {
+    return noisyPatterns.data?.map(p => p.id) ?? [];
+  }, [noisyPatterns.data]);
+
+  const queryClient = useQueryClient();
+
+  const denoisedRows = useQuery({
+    queryKey: [
+      'denoised-rows',
+      config,
+      processedRows,
+      noisyPatternIds,
+      patternColumn,
+    ],
+    queryFn: async () => {
+      const matchedLogs = await groupedPatterns.miner?.matchLogs(
+        processedRows.map(row => row[patternColumn]),
+      );
+      return processedRows.filter((row, i) => {
+        const match = matchedLogs?.[i];
+        return !noisyPatternIds.includes(`${match}`);
+      });
+    },
+    placeholderData: (previousData, previousQuery) => {
+      // If it's the same search, but new data, return the previous data while we load
+      if (
+        previousQuery?.queryKey?.[0] === 'denoised-rows' &&
+        previousQuery?.queryKey?.[1] === config
+      ) {
+        return previousData;
+      }
+      return undefined;
+    },
+    enabled:
+      processedRows.length > 0 &&
+      groupedPatterns.miner != null &&
+      noisyPatternIds.length > 0,
+  });
+
+  const isLoading = denoiseResults
+    ? isFetching ||
+      denoisedRows.isFetching ||
+      noisyPatterns.isFetching ||
+      groupedPatterns.isLoading
+    : isFetching;
+
   return (
-    <RawLogTable
+    <>
+      {denoiseResults && (
+
+
+          Removed Noisy Event Patterns
+
+
+          {noisyPatterns.data?.map(p => (
+
+              {p.pattern}
+
+          ))}
+
+
+      )}
+
+
   );
 }
diff --git a/packages/app/src/components/DBSearchPageFilters.tsx b/packages/app/src/components/DBSearchPageFilters.tsx
index 387408bac..9ad57cdea 100644
--- a/packages/app/src/components/DBSearchPageFilters.tsx
+++ b/packages/app/src/components/DBSearchPageFilters.tsx
@@ -326,6 +326,8 @@ export const DBSearchPageFilters = ({
   setAnalysisMode,
   sourceId,
   showDelta,
+  denoiseResults,
+  setDenoiseResults,
 }: {
   analysisMode: 'results' | 'delta' | 'pattern';
   setAnalysisMode: (mode: 'results' | 'delta' | 'pattern') => void;
@@ -333,6 +335,8 @@ export const DBSearchPageFilters = ({
   chartConfig: ChartConfigWithDateRange;
   sourceId?: string;
   showDelta: boolean;
+  denoiseResults: boolean;
+  setDenoiseResults: (denoiseResults: boolean) => void;
 } & FilterStateHook) => {
   const { toggleFilterPin, isFilterPinned } = usePinnedFilters(
     sourceId ?? null,
@@ -489,6 +493,26 @@ export const DBSearchPageFilters = ({
           )}
+
+
+            Denoise Results
+
+
+          }
+          onChange={() => setDenoiseResults(!denoiseResults)}
+        />
+
         {isLoading || isFacetsLoading ? (
diff --git a/packages/app/src/components/PatternTable.tsx b/packages/app/src/components/PatternTable.tsx
index 087dba8d9..af8bffe6b 100644
--- a/packages/app/src/components/PatternTable.tsx
+++ b/packages/app/src/components/PatternTable.tsx
@@ -1,131 +1,11 @@
 import { useMemo } from 'react';
-import stripAnsi from 'strip-ansi';
 import { ChartConfigWithDateRange } from '@hyperdx/common-utils/dist/types';
-import { useQuery } from '@tanstack/react-query';
-
-import {
-  convertDateRangeToGranularityString,
-  timeBucketByGranularity,
-  toStartOfInterval,
-} from '@/ChartUtils';
-import { useQueriedChartConfig } from '@/hooks/useChartConfig';
-
-import {
-  RawLogTable,
-  selectColumnMapWithoutAdditionalKeys,
-  useConfigWithPrimaryAndPartitionKey,
-} from './DBRowTable';
-import { useSearchTotalCount } from './SearchTotalCountChart';
-
-// We don't want to load pyodide over and over again, use react query to cache the async instance
-function usePyodide() {
-  return useQuery({
-    queryKey: ['pyodide'],
-    queryFn: async () => {
-      // @ts-ignore
-      const pyodide = await window.loadPyodide();
-      await pyodide.loadPackage('micropip');
-      const micropip = pyodide.pyimport('micropip');
-      const url = new URL(
-        '/drain3-0.9.11-py3-none-any.whl',
-        window.location.origin,
-      );
-      await micropip.install(url.href);
-      return pyodide;
-    },
-    refetchOnWindowFocus: false,
-    refetchOnMount: false,
-    refetchInterval: false,
-  });
-}
-
-async function mineEventPatterns(logs: string[], pyodide: any) {
-  pyodide.globals.set('HDXLOGS', logs);
-  return JSON.parse(
-    await pyodide.runPythonAsync(`
-import js
-import json
-from drain3 import TemplateMiner
-from drain3.template_miner_config import TemplateMinerConfig
-m = TemplateMiner(None, TemplateMinerConfig())
-results = []
-for log in HDXLOGS:
-    results.append(m.add_log_message(log))
-
-json.dumps(results)
-  `),
-  );
-}
-
-const PATTERN_COLUMN_ALIAS = '__hdx_pattern_field';
-const TIMESTAMP_COLUMN_ALIAS = '__hdx_timestamp';
-
-const useMinePatterns = ({
-  config,
-  samples,
-  bodyValueExpression,
-}: {
-  config: ChartConfigWithDateRange;
-  samples: number;
-  bodyValueExpression: string;
-}) => {
-  const configWithPrimaryAndPartitionKey = useConfigWithPrimaryAndPartitionKey({
-    ...config,
-    // TODO: User-configurable pattern columns and non-pattern/group by columns
-    select: [
-      `${bodyValueExpression} as ${PATTERN_COLUMN_ALIAS}`,
-      `${config.timestampValueExpression} as ${TIMESTAMP_COLUMN_ALIAS}`,
-    ].join(','),
-    // TODO: Proper sampling
-    orderBy: [{ ordering: 'DESC', valueExpression: 'rand()' }],
-    limit: { limit: samples },
-  });
-
-  const { data: sampleRows } = useQueriedChartConfig(
-    configWithPrimaryAndPartitionKey ?? config, // `config` satisfying type, never used due to `enabled` check
-    { enabled: configWithPrimaryAndPartitionKey != null },
-  );
-
-  const { data: pyodide } = usePyodide();
-
-  return useQuery({
-    queryKey: ['patterns', config],
-    queryFn: () => {
-      if (configWithPrimaryAndPartitionKey == null) {
-        throw new Error('Unexpected configWithPrimaryAndPartitionKey is null');
-      }
-
-      const logs =
-        sampleRows?.data.map(row => {
-          return stripAnsi(row[PATTERN_COLUMN_ALIAS] as string);
-        }) ?? [];
-
-      // patternId -> count, {bucket:count}, pattern
-      return mineEventPatterns(logs, pyodide).then(result => {
-        const rowsWithPatternId = [];
-        for (let i = 0; i < result.length; i++) {
-          const r = result[i];
-          const row = sampleRows?.data[i];
-          rowsWithPatternId.push({
-            ...row,
-            __hdx_patternId: r.cluster_id,
-            __hdx_pattern: r.template_mined,
-          });
-        }
-
-        return {
-          ...sampleRows,
-          data: rowsWithPatternId,
-          additionalKeysLength:
-            configWithPrimaryAndPartitionKey.additionalKeysLength,
-        };
-      });
-    },
-    refetchOnWindowFocus: false,
-    enabled: sampleRows != null && pyodide != null,
-  });
-};
+import { RawLogTable } from '@/components/DBRowTable';
+import { useSearchTotalCount } from '@/components/SearchTotalCountChart';
+import { useGroupedPatterns } from '@/hooks/usePatterns';

+const emptyMap = new Map();
 export default function PatternTable({
   config,
   totalCountConfig,
@@ -137,19 +17,7 @@ export default function PatternTable({
   bodyValueExpression: string;
   totalCountQueryKeyPrefix: string;
 }) {
-  const SAMPLES = 10000;
-  const { data: results, isFetching } = useMinePatterns({
-    config,
-    samples: SAMPLES,
-    bodyValueExpression,
-  });
-  const columnMap = useMemo(() => {
-    return selectColumnMapWithoutAdditionalKeys(
-      results?.meta,
-      results?.additionalKeysLength,
-    );
-  }, [results]);
-  const columns = useMemo(() => Array.from(columnMap.keys()), [columnMap]);
+  const SAMPLES = 10_000;

   const {
     totalCount,
@@ -157,76 +25,15 @@ export default function PatternTable({
     isError: isTotalCountError,
   } = useSearchTotalCount(totalCountConfig, totalCountQueryKeyPrefix);

-  const sampleMultiplier = useMemo(() => {
-    return totalCount && results?.data.length
-      ? totalCount / results?.data.length
-      : 1;
-  }, [totalCount, results?.data.length]);
-
-  const granularity = convertDateRangeToGranularityString(config.dateRange, 24);
-  const timeRangeBuckets = timeBucketByGranularity(
-    config.dateRange[0],
-    config.dateRange[1],
-    granularity,
-  );
-
-  // TODO: Group by pattern and other select attributes
-  const groupedResults = useMemo(() => {
-    const patternGroups = results?.data.reduce<Record<string, any>>(
-      (acc, row) => {
-        const key = `${row.__hdx_patternId}`;
-        acc[key] = acc[key] || [];
-        acc[key].push(row);
-        return acc;
-      },
-      {} as Record<string, any>,
-    );
-
-    const fullPatternGroups: Record<string, any> = {};
-    // bucket count by timestamp
-    Object.entries(patternGroups ?? {}).forEach(([patternId, rows]) => {
-      const initBucketCount: Record<number, number> = timeRangeBuckets.reduce(
-        (acc, bucket) => {
-          acc[bucket.getTime()] = 0;
-          return acc;
-        },
-        {} as Record<number, number>,
-      );
-
-      const bucketCounts = rows.reduce<Record<number, number>>((acc, row) => {
-        const ts = row[TIMESTAMP_COLUMN_ALIAS];
-        const bucket = toStartOfInterval(new Date(ts), granularity).getTime();
-        acc[bucket] = (acc[bucket] || 0) + 1;
-        return acc;
-      }, initBucketCount);
-
-      // return at least 1
-      const count = Math.max(Math.round(rows.length * sampleMultiplier), 1);
-      fullPatternGroups[patternId] = {
-        id: patternId,
-        pattern: rows[rows.length - 1].__hdx_pattern, // last pattern is usually the most up to date templated pattern
-        count,
-        countStr: `~${count}`,
-        samples: rows,
-        __hdx_pattern_trend: {
-          data: Object.entries(bucketCounts).map(([bucket, count]) => ({
-            bucket: Number.parseInt(bucket) / 1000, // recharts expects unix timestamp
-            count: Math.round(count * sampleMultiplier),
-          })),
-          granularity,
-          dateRange: config.dateRange,
-        },
-      };
+  const { data: groupedResults, isLoading: isGroupedPatternsLoading } =
+    useGroupedPatterns({
+      config,
+      samples: SAMPLES,
+      bodyValueExpression,
+      totalCount,
     });

-    return fullPatternGroups;
-  }, [
-    results,
-    granularity,
-    sampleMultiplier,
-    timeRangeBuckets,
-    config.dateRange,
-  ]);
+  const isLoading = isTotalCountLoading || isGroupedPatternsLoading;

   const sortedGroupedResults = useMemo(() => {
     return Object.values(groupedResults).sort((a, b) => b.count - a.count);
@@ -237,16 +44,14 @@ export default function PatternTable({
         {}}
-        onSettingsClick={() => {}}
-        onInstructionsClick={() => {}}
         hasNextPage={false}
         fetchNextPage={() => {}}
         highlightedLineId={''}
-        columnTypeMap={columnMap}
+        columnTypeMap={emptyMap}
         generateRowId={row => row.__hdx_patternId}
         columnNameMap={{
           __hdx_pattern_trend: 'Trend',
diff --git a/packages/app/src/hooks/usePatterns.tsx b/packages/app/src/hooks/usePatterns.tsx
new file mode 100644
index 000000000..6c87e233f
--- /dev/null
+++ b/packages/app/src/hooks/usePatterns.tsx
@@ -0,0 +1,275 @@
+import { useMemo } from 'react';
+import stripAnsi from 'strip-ansi';
+import { ChartConfigWithDateRange } from '@hyperdx/common-utils/dist/types';
+import { useQuery } from '@tanstack/react-query';
+
+import {
+  convertDateRangeToGranularityString,
+  timeBucketByGranularity,
+  toStartOfInterval,
+} from '@/ChartUtils';
+import {
+  selectColumnMapWithoutAdditionalKeys,
+  useConfigWithPrimaryAndPartitionKey,
+} from '@/components/DBRowTable';
+import { useQueriedChartConfig } from '@/hooks/useChartConfig';
+
+// We don't want to load pyodide over and over again, use react query to cache the async instance
+function usePyodide(options: { enabled: boolean }) {
+  return useQuery({
+    queryKey: ['pyodide'],
+    queryFn: async () => {
+      // @ts-ignore
+      const pyodide = await window.loadPyodide();
+      await pyodide.loadPackage('micropip');
+      const micropip = pyodide.pyimport('micropip');
+      const url = new URL(
+        '/drain3-0.9.11-py3-none-any.whl',
+        window.location.origin,
+      );
+      await micropip.install(url.href);
+      return pyodide;
+    },
+    refetchOnWindowFocus: false,
+    refetchOnMount: false,
+    refetchInterval: false,
+    enabled: options.enabled,
+  });
+}
+
+class Miner {
+  private minerVariableName;
+  private id;
+  constructor(private pyodide: any) {
+    this.id = Math.random().toString(36).substring(2, 15);
+    this.minerVariableName = `m_${this.id}`;
+  }
+
+  async init() {
+    await this.pyodide.runPythonAsync(`
+import js
+import json
+from drain3 import TemplateMiner
+from drain3.template_miner_config import TemplateMinerConfig
+
+${this.minerVariableName} = TemplateMiner(None, TemplateMinerConfig())
+    `);
+  }
+
+  async minePatterns(logs: string[]) {
+    const tempLogs = `temp_logs_${this.id}`;
+    const tempResults = `temp_results_${this.id}`;
+    this.pyodide.globals.set(tempLogs, logs);
+    return JSON.parse(
+      await this.pyodide.runPythonAsync(`
+${tempResults} = []
+for log in ${tempLogs}:
+    ${tempResults}.append(${this.minerVariableName}.add_log_message(log))
+json.dumps(${tempResults})
+      `),
+    );
+  }
+
+  async matchLogs(logs: string[]) {
+    const tempLogs = `temp_logs_${this.id}`;
+    const tempResults = `temp_results_${this.id}`;
+    this.pyodide.globals.set(tempLogs, logs);
+
+    return JSON.parse(
+      await this.pyodide.runPythonAsync(`
+${tempResults} = []
+for log in ${tempLogs}:
+    match = ${this.minerVariableName}.match(log)
+    ${tempResults}.append(match.cluster_id if match else None)
+json.dumps(${tempResults})
+      `),
+    );
+  }
+}
+
+async function mineEventPatterns(logs: string[], pyodide: any) {
+  const miner = new Miner(pyodide);
+  await miner.init();
+  return {
+    miner,
+    patterns: await miner.minePatterns(logs),
+  };
+}
+
+const PATTERN_COLUMN_ALIAS = '__hdx_pattern_field';
+const TIMESTAMP_COLUMN_ALIAS = '__hdx_timestamp';
+
+export function usePatterns({
+  config,
+  samples,
+  bodyValueExpression,
+  enabled = true,
+}: {
+  config: ChartConfigWithDateRange;
+  samples: number;
+  bodyValueExpression: string;
+  enabled?: boolean;
+}) {
+  const configWithPrimaryAndPartitionKey = useConfigWithPrimaryAndPartitionKey({
+    ...config,
+    // TODO: User-configurable pattern columns and non-pattern/group by columns
+    select: [
+      `${bodyValueExpression} as ${PATTERN_COLUMN_ALIAS}`,
+      `${config.timestampValueExpression} as ${TIMESTAMP_COLUMN_ALIAS}`,
+    ].join(','),
+    // TODO: Proper sampling
+    orderBy: [{ ordering: 'DESC', valueExpression: 'rand()' }],
+    limit: { limit: samples },
+  });
+
+  const { data: sampleRows } = useQueriedChartConfig(
+    configWithPrimaryAndPartitionKey ?? config, // `config` satisfying type, never used due to `enabled` check
+    { enabled: configWithPrimaryAndPartitionKey != null && enabled },
+  );
+
+  const { data: pyodide } = usePyodide({ enabled });
+
+  return useQuery({
+    queryKey: ['patterns', config],
+    queryFn: () => {
+      if (configWithPrimaryAndPartitionKey == null) {
+        throw new Error('Unexpected configWithPrimaryAndPartitionKey is null');
+      }
+
+      const logs =
+        sampleRows?.data.map(row => {
+          return stripAnsi(row[PATTERN_COLUMN_ALIAS] as string);
+        }) ?? [];
+
+      // patternId -> count, {bucket:count}, pattern
+      return mineEventPatterns(logs, pyodide).then(response => {
+        const rowsWithPatternId = [];
+        const result = response.patterns;
+        for (let i = 0; i < result.length; i++) {
+          const r = result[i];
+          const row = sampleRows?.data[i];
+          rowsWithPatternId.push({
+            ...row,
+            __hdx_patternId: r.cluster_id,
+            __hdx_pattern: r.template_mined,
+          });
+        }
+
+        return {
+          ...sampleRows,
+          data: rowsWithPatternId,
+          additionalKeysLength:
+            configWithPrimaryAndPartitionKey.additionalKeysLength,
+          miner: response.miner,
+        };
+      });
+    },
+    refetchOnWindowFocus: false,
+    enabled: sampleRows != null && pyodide != null && enabled,
+  });
+}
+
+export function useGroupedPatterns({
+  config,
+  samples,
+  bodyValueExpression,
+  totalCount,
+  enabled = true,
+}: {
+  config: ChartConfigWithDateRange;
+  samples: number;
+  bodyValueExpression: string;
+  totalCount?: number;
+  enabled?: boolean;
+}) {
+  const { data: results, isFetching } = usePatterns({
+    config,
+    samples,
+    bodyValueExpression,
+    enabled,
+  });
+  const columnMap = useMemo(() => {
+    return selectColumnMapWithoutAdditionalKeys(
+      results?.meta,
+      results?.additionalKeysLength,
+    );
+  }, [results]);
+  const columns = useMemo(() => Array.from(columnMap.keys()), [columnMap]);
+
+  const sampledRowCount = results?.data.length;
+  const sampleMultiplier = useMemo(() => {
+    return totalCount && sampledRowCount ? totalCount / sampledRowCount : 1;
+  }, [totalCount, sampledRowCount]);
+
+  const granularity = convertDateRangeToGranularityString(config.dateRange, 24);
+  const timeRangeBuckets = timeBucketByGranularity(
+    config.dateRange[0],
+    config.dateRange[1],
+    granularity,
+  );
+
+  // TODO: Group by pattern and other select attributes
+  const groupedResults = useMemo(() => {
+    const patternGroups = results?.data.reduce<Record<string, any>>(
+      (acc, row) => {
+        const key = `${row.__hdx_patternId}`;
+        acc[key] = acc[key] || [];
+        acc[key].push(row);
+        return acc;
+      },
+      {} as Record<string, any>,
+    );
+
+    const fullPatternGroups: Record<string, any> = {};
+    // bucket count by timestamp
+    Object.entries(patternGroups ?? {}).forEach(([patternId, rows]) => {
+      const initBucketCount: Record<number, number> = timeRangeBuckets.reduce(
+        (acc, bucket) => {
+          acc[bucket.getTime()] = 0;
+          return acc;
+        },
+        {} as Record<number, number>,
+      );
+
+      const bucketCounts = rows.reduce<Record<number, number>>((acc, row) => {
+        const ts = row[TIMESTAMP_COLUMN_ALIAS];
+        const bucket = toStartOfInterval(new Date(ts), granularity).getTime();
+        acc[bucket] = (acc[bucket] || 0) + 1;
+        return acc;
+      }, initBucketCount);
+
+      // return at least 1
+      const count = Math.max(Math.round(rows.length * sampleMultiplier), 1);
+      fullPatternGroups[patternId] = {
+        id: patternId,
+        pattern: rows[rows.length - 1].__hdx_pattern, // last pattern is usually the most up to date templated pattern
+        count,
+        countStr: `~${count}`,
+        samples: rows,
+        __hdx_pattern_trend: {
+          data: Object.entries(bucketCounts).map(([bucket, count]) => ({
+            bucket: Number.parseInt(bucket) / 1000, // recharts expects unix timestamp
+            count: Math.round(count * sampleMultiplier),
+          })),
+          granularity,
+          dateRange: config.dateRange,
+        },
+      };
+    });
+
+    return fullPatternGroups;
+  }, [
+    results,
+    granularity,
+    sampleMultiplier,
+    timeRangeBuckets,
+    config.dateRange,
+  ]);
+
+  return {
+    data: groupedResults,
+    isLoading: isFetching,
+    miner: results?.miner,
+    sampledRowCount,
+  };
+}
diff --git a/packages/app/src/hooks/useRowWhere.tsx b/packages/app/src/hooks/useRowWhere.tsx
index c04debac3..82549c353 100644
--- a/packages/app/src/hooks/useRowWhere.tsx
+++ b/packages/app/src/hooks/useRowWhere.tsx
@@ -48,12 +48,14 @@ export default function useRowWhere({

         if (jsType == null || chType == null) {
           throw new Error(
-            `Column type not found for ${column}, ${columnMap}`,
+            `Column type not found for ${column}, ${JSON.stringify(columnMap)}`,
           );
         }

         if (valueExpr == null) {
-          throw new Error(`valueExpr not found for ${column}, ${columnMap}`);
+          throw new Error(
+            `valueExpr not found for ${column}, ${JSON.stringify(columnMap)}`,
+          );
         }

         switch (jsType) {

From bb6e26fd597e762460ad09fecc6df2edfa75f57c Mon Sep 17 00:00:00 2001
From: Mike Shi
Date: Mon, 5 May 2025 20:54:53 -0700
Subject: [PATCH 2/3] handle when no noisy patterns are found

---
 packages/app/src/components/DBRowTable.tsx    | 15 ++++++-
 .../src/components/DBSearchPageFilters.tsx    | 40 ++++++++++---------
 2 files changed, 34 insertions(+), 21 deletions(-)

diff --git a/packages/app/src/components/DBRowTable.tsx b/packages/app/src/components/DBRowTable.tsx
index 9266f162d..5ab7418ef 100644
--- a/packages/app/src/components/DBRowTable.tsx
+++ b/packages/app/src/components/DBRowTable.tsx
@@ -1028,6 +1028,11 @@ export function DBSqlRowTable({
       patternColumn,
     ],
     queryFn: async () => {
+      // No noisy patterns, so no need to denoise
+      if (noisyPatternIds.length === 0) {
+        return processedRows;
+      }
+
       const matchedLogs = await groupedPatterns.miner?.matchLogs(
         processedRows.map(row => row[patternColumn]),
       );
@@ -1047,9 +1052,10 @@ export function DBSqlRowTable({
       return undefined;
     },
     enabled:
+      denoiseResults &&
+      noisyPatterns.isSuccess &&
       processedRows.length > 0 &&
-      groupedPatterns.miner != null &&
-      noisyPatternIds.length > 0,
+      groupedPatterns.miner != null,
   });
@@ -1072,6 +1078,11 @@ export function DBSqlRowTable({
              {p.pattern}
          ))}
+         {noisyPatternIds.length === 0 && (
+
+             No noisy patterns found
+
+         )}
       )}
diff --git a/packages/app/src/components/DBSearchPageFilters.tsx b/packages/app/src/components/DBSearchPageFilters.tsx
index 9ad57cdea..e55a60d1c 100644
--- a/packages/app/src/components/DBSearchPageFilters.tsx
+++ b/packages/app/src/components/DBSearchPageFilters.tsx
@@ -493,25 +493,27 @@ export const DBSearchPageFilters = ({
           )}
-
-
-            Denoise Results
-
-
-          }
-          onChange={() => setDenoiseResults(!denoiseResults)}
-        />
+        {analysisMode === 'results' && (
+
+
+              Denoise Results
+
+
+            }
+            onChange={() => setDenoiseResults(!denoiseResults)}
+          />
+        )}

         {isLoading || isFacetsLoading ? (

From b7971aed2d846ab80bd706f6fcc45a4bc17ebe8a Mon Sep 17 00:00:00 2001
From: Mike Shi
Date: Tue, 6 May 2025 10:13:48 -0700
Subject: [PATCH 3/3] remove unused imports

---
 packages/app/src/components/DBRowTable.tsx | 2 --
 1 file changed, 2 deletions(-)

diff --git a/packages/app/src/components/DBRowTable.tsx b/packages/app/src/components/DBRowTable.tsx
index 5ab7418ef..f57eeb1e4 100644
--- a/packages/app/src/components/DBRowTable.tsx
+++ b/packages/app/src/components/DBRowTable.tsx
@@ -29,8 +29,6 @@ import { splitAndTrimWithBracket } from '@hyperdx/common-utils/dist/utils';
 import { Box, Code, Flex, Text } from '@mantine/core';
 import {
   FetchNextPageOptions,
-  keepPreviousData,
-  QueryClient,
   useQuery,
   useQueryClient,
 } from '@tanstack/react-query';