From 3654fa6ee7be08dc4c1d4a9ac35df2a839191440 Mon Sep 17 00:00:00 2001 From: Waleed Latif Date: Wed, 14 Jan 2026 18:30:11 -0800 Subject: [PATCH 1/2] feat(dashboard): added stats endpoint to compute stats on server side and avoid limit --- apps/sim/app/api/logs/stats/route.ts | 290 ++++++++++++++++++ .../logs/components/dashboard/dashboard.tsx | 242 +++------------ .../app/workspace/[workspaceId]/logs/logs.tsx | 10 +- apps/sim/blocks/blocks/langsmith.ts | 1 - apps/sim/hooks/queries/logs.ts | 99 ++++-- 5 files changed, 411 insertions(+), 231 deletions(-) create mode 100644 apps/sim/app/api/logs/stats/route.ts diff --git a/apps/sim/app/api/logs/stats/route.ts b/apps/sim/app/api/logs/stats/route.ts new file mode 100644 index 0000000000..d1e1fe2820 --- /dev/null +++ b/apps/sim/app/api/logs/stats/route.ts @@ -0,0 +1,290 @@ +import { db } from '@sim/db' +import { permissions, workflow, workflowExecutionLogs } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq, sql } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { getSession } from '@/lib/auth' +import { generateRequestId } from '@/lib/core/utils/request' +import { buildFilterConditions, LogFilterParamsSchema } from '@/lib/logs/filters' + +const logger = createLogger('LogsStatsAPI') + +export const revalidate = 0 + +const StatsQueryParamsSchema = LogFilterParamsSchema.extend({ + segmentCount: z.coerce.number().optional().default(72), +}) + +export interface SegmentStats { + timestamp: string + totalExecutions: number + successfulExecutions: number + avgDurationMs: number +} + +export interface WorkflowStats { + workflowId: string + workflowName: string + segments: SegmentStats[] + overallSuccessRate: number + totalExecutions: number + totalSuccessful: number +} + +export interface DashboardStatsResponse { + workflows: WorkflowStats[] + aggregateSegments: SegmentStats[] + totalRuns: number + totalErrors: number + avgLatency: number + timeBounds: { + start: string + end: string + } + segmentMs: number +} + +export async function GET(request: NextRequest) { + const requestId = generateRequestId() + + try { + const session = await getSession() + if (!session?.user?.id) { + logger.warn(`[${requestId}] Unauthorized logs stats access attempt`) + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const userId = session.user.id + + try { + const { searchParams } = new URL(request.url) + const params = StatsQueryParamsSchema.parse(Object.fromEntries(searchParams.entries())) + + const workspaceFilter = eq(workflowExecutionLogs.workspaceId, params.workspaceId) + + const commonFilters = buildFilterConditions(params, { useSimpleLevelFilter: true }) + const whereCondition = commonFilters ? 
and(workspaceFilter, commonFilters) : workspaceFilter
+
+      const boundsQuery = await db
+        .select({
+          minTime: sql`MIN(${workflowExecutionLogs.startedAt})`,
+          maxTime: sql`MAX(${workflowExecutionLogs.startedAt})`,
+        })
+        .from(workflowExecutionLogs)
+        .innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
+        .innerJoin(
+          permissions,
+          and(
+            eq(permissions.entityType, 'workspace'),
+            eq(permissions.entityId, workflowExecutionLogs.workspaceId),
+            eq(permissions.userId, userId)
+          )
+        )
+        .where(whereCondition)
+
+      const bounds = boundsQuery[0]
+      const now = new Date()
+
+      let startTime: Date
+      let endTime: Date
+
+      if (!bounds?.minTime || !bounds?.maxTime) {
+        endTime = now
+        startTime = new Date(now.getTime() - 24 * 60 * 60 * 1000)
+      } else {
+        startTime = new Date(bounds.minTime)
+        endTime = new Date(Math.max(new Date(bounds.maxTime).getTime(), now.getTime()))
+      }
+
+      const totalMs = Math.max(1, endTime.getTime() - startTime.getTime())
+      const segmentMs = Math.max(60000, Math.floor(totalMs / params.segmentCount))
+
+      const statsQuery = await db
+        .select({
+          workflowId: workflowExecutionLogs.workflowId,
+          workflowName: workflow.name,
+          segmentIndex:
+            sql`FLOOR(EXTRACT(EPOCH FROM (${workflowExecutionLogs.startedAt} - ${startTime}::timestamp)) * 1000 / ${segmentMs})`.as(
+              'segment_index'
+            ),
+          totalExecutions: sql`COUNT(*)`.as('total_executions'),
+          successfulExecutions:
+            sql`COUNT(*) FILTER (WHERE ${workflowExecutionLogs.level} != 'error')`.as(
+              'successful_executions'
+            ),
+          avgDurationMs:
+            sql`COALESCE(AVG(${workflowExecutionLogs.totalDurationMs}) FILTER (WHERE ${workflowExecutionLogs.totalDurationMs} > 0), 0)`.as(
+              'avg_duration_ms'
+            ),
+        })
+        .from(workflowExecutionLogs)
+        .innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
+        .innerJoin(
+          permissions,
+          and(
+            eq(permissions.entityType, 'workspace'),
+            eq(permissions.entityId, workflowExecutionLogs.workspaceId),
+            eq(permissions.userId, userId)
+          )
+        )
+        .where(whereCondition)
+        .groupBy(
+          workflowExecutionLogs.workflowId,
+          workflow.name,
+          sql`FLOOR(EXTRACT(EPOCH FROM (${workflowExecutionLogs.startedAt} - ${startTime}::timestamp)) * 1000 / ${segmentMs})`
+        )
+        .orderBy(workflowExecutionLogs.workflowId, sql`segment_index`)
+
+      const workflowMap = new Map<
+        string,
+        {
+          workflowId: string
+          workflowName: string
+          segments: Map<number, SegmentStats>
+          totalExecutions: number
+          totalSuccessful: number
+        }
+      >()
+
+      for (const row of statsQuery) {
+        const segmentIndex = Math.min(
+          params.segmentCount - 1,
+          Math.max(0, Math.floor(Number(row.segmentIndex)))
+        )
+
+        if (!workflowMap.has(row.workflowId)) {
+          workflowMap.set(row.workflowId, {
+            workflowId: row.workflowId,
+            workflowName: row.workflowName,
+            segments: new Map(),
+            totalExecutions: 0,
+            totalSuccessful: 0,
+          })
+        }
+
+        const wf = workflowMap.get(row.workflowId)!
+ wf.totalExecutions += Number(row.totalExecutions) + wf.totalSuccessful += Number(row.successfulExecutions) + + const existing = wf.segments.get(segmentIndex) + if (existing) { + existing.totalExecutions += Number(row.totalExecutions) + existing.successfulExecutions += Number(row.successfulExecutions) + existing.avgDurationMs = (existing.avgDurationMs + Number(row.avgDurationMs || 0)) / 2 + } else { + wf.segments.set(segmentIndex, { + timestamp: new Date(startTime.getTime() + segmentIndex * segmentMs).toISOString(), + totalExecutions: Number(row.totalExecutions), + successfulExecutions: Number(row.successfulExecutions), + avgDurationMs: Number(row.avgDurationMs || 0), + }) + } + } + + const workflows: WorkflowStats[] = [] + for (const wf of workflowMap.values()) { + const segments: SegmentStats[] = [] + for (let i = 0; i < params.segmentCount; i++) { + const existing = wf.segments.get(i) + if (existing) { + segments.push(existing) + } else { + segments.push({ + timestamp: new Date(startTime.getTime() + i * segmentMs).toISOString(), + totalExecutions: 0, + successfulExecutions: 0, + avgDurationMs: 0, + }) + } + } + + workflows.push({ + workflowId: wf.workflowId, + workflowName: wf.workflowName, + segments, + totalExecutions: wf.totalExecutions, + totalSuccessful: wf.totalSuccessful, + overallSuccessRate: + wf.totalExecutions > 0 ? (wf.totalSuccessful / wf.totalExecutions) * 100 : 100, + }) + } + + workflows.sort((a, b) => { + const errA = a.overallSuccessRate < 100 ? 1 - a.overallSuccessRate / 100 : 0 + const errB = b.overallSuccessRate < 100 ? 1 - b.overallSuccessRate / 100 : 0 + if (errA !== errB) return errB - errA + return a.workflowName.localeCompare(b.workflowName) + }) + + const aggregateSegments: SegmentStats[] = [] + let totalRuns = 0 + let totalErrors = 0 + let weightedLatencySum = 0 + let latencyCount = 0 + + for (let i = 0; i < params.segmentCount; i++) { + let segTotal = 0 + let segSuccess = 0 + let segWeightedLatency = 0 + let segLatencyCount = 0 + + for (const wf of workflows) { + const seg = wf.segments[i] + segTotal += seg.totalExecutions + segSuccess += seg.successfulExecutions + if (seg.avgDurationMs > 0 && seg.totalExecutions > 0) { + segWeightedLatency += seg.avgDurationMs * seg.totalExecutions + segLatencyCount += seg.totalExecutions + } + } + + totalRuns += segTotal + totalErrors += segTotal - segSuccess + weightedLatencySum += segWeightedLatency + latencyCount += segLatencyCount + + aggregateSegments.push({ + timestamp: new Date(startTime.getTime() + i * segmentMs).toISOString(), + totalExecutions: segTotal, + successfulExecutions: segSuccess, + avgDurationMs: segLatencyCount > 0 ? segWeightedLatency / segLatencyCount : 0, + }) + } + + const avgLatency = latencyCount > 0 ? 
weightedLatencySum / latencyCount : 0 + + const response: DashboardStatsResponse = { + workflows, + aggregateSegments, + totalRuns, + totalErrors, + avgLatency, + timeBounds: { + start: startTime.toISOString(), + end: endTime.toISOString(), + }, + segmentMs, + } + + return NextResponse.json(response, { status: 200 }) + } catch (validationError) { + if (validationError instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid logs stats request parameters`, { + errors: validationError.errors, + }) + return NextResponse.json( + { + error: 'Invalid request parameters', + details: validationError.errors, + }, + { status: 400 } + ) + } + throw validationError + } + } catch (error: any) { + logger.error(`[${requestId}] logs stats fetch error`, error) + return NextResponse.json({ error: error.message }, { status: 500 }) + } +} diff --git a/apps/sim/app/workspace/[workspaceId]/logs/components/dashboard/dashboard.tsx b/apps/sim/app/workspace/[workspaceId]/logs/components/dashboard/dashboard.tsx index e573b813c4..aebf9df71e 100644 --- a/apps/sim/app/workspace/[workspaceId]/logs/components/dashboard/dashboard.tsx +++ b/apps/sim/app/workspace/[workspaceId]/logs/components/dashboard/dashboard.tsx @@ -3,9 +3,9 @@ import { useCallback, useEffect, useMemo, useRef, useState } from 'react' import { Loader2 } from 'lucide-react' import { Skeleton } from '@/components/ui/skeleton' -import { formatLatency, parseDuration } from '@/app/workspace/[workspaceId]/logs/utils' +import { formatLatency } from '@/app/workspace/[workspaceId]/logs/utils' +import type { DashboardStatsResponse, WorkflowStats } from '@/hooks/queries/logs' import { useFilterStore } from '@/stores/logs/filters/store' -import type { WorkflowLog } from '@/stores/logs/filters/types' import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { LineChart, WorkflowsList } from './components' @@ -26,10 +26,6 @@ interface WorkflowExecution { overallSuccessRate: number } -const DEFAULT_SEGMENTS = 72 -const MIN_SEGMENT_PX = 10 -const MIN_SEGMENT_MS = 60000 - const SKELETON_BAR_HEIGHTS = [ 45, 72, 38, 85, 52, 68, 30, 90, 55, 42, 78, 35, 88, 48, 65, 28, 82, 58, 40, 75, 32, 95, 50, 70, ] @@ -120,13 +116,32 @@ function DashboardSkeleton() { } interface DashboardProps { - logs: WorkflowLog[] + stats?: DashboardStatsResponse isLoading: boolean error?: Error | null } -export default function Dashboard({ logs, isLoading, error }: DashboardProps) { - const [segmentCount, setSegmentCount] = useState(DEFAULT_SEGMENTS) +/** + * Converts server WorkflowStats to the internal WorkflowExecution format. + */ +function toWorkflowExecution(wf: WorkflowStats): WorkflowExecution { + return { + workflowId: wf.workflowId, + workflowName: wf.workflowName, + overallSuccessRate: wf.overallSuccessRate, + segments: wf.segments.map((seg) => ({ + timestamp: seg.timestamp, + totalExecutions: seg.totalExecutions, + successfulExecutions: seg.successfulExecutions, + hasExecutions: seg.totalExecutions > 0, + successRate: + seg.totalExecutions > 0 ? (seg.successfulExecutions / seg.totalExecutions) * 100 : 100, + avgDurationMs: seg.avgDurationMs, + })), + } +} + +export default function Dashboard({ stats, isLoading, error }: DashboardProps) { const [selectedSegments, setSelectedSegments] = useState>({}) const [lastAnchorIndices, setLastAnchorIndices] = useState>({}) const barsAreaRef = useRef(null) @@ -137,182 +152,32 @@ export default function Dashboard({ logs, isLoading, error }: DashboardProps) { const expandedWorkflowId = workflowIds.length === 1 ? 
workflowIds[0] : null - const lastExecutionByWorkflow = useMemo(() => { - const map = new Map() - for (const log of logs) { - const wfId = log.workflowId - if (!wfId) continue - const ts = new Date(log.createdAt).getTime() - const existing = map.get(wfId) - if (!existing || ts > existing) { - map.set(wfId, ts) - } - } - return map - }, [logs]) - - const timeBounds = useMemo(() => { - if (logs.length === 0) { - const now = new Date() - return { start: now, end: now } - } - - let minTime = Number.POSITIVE_INFINITY - let maxTime = Number.NEGATIVE_INFINITY - - for (const log of logs) { - const ts = new Date(log.createdAt).getTime() - if (ts < minTime) minTime = ts - if (ts > maxTime) maxTime = ts - } - - const end = new Date(Math.max(maxTime, Date.now())) - const start = new Date(minTime) - - return { start, end } - }, [logs]) - const { executions, aggregateSegments, segmentMs } = useMemo(() => { - const allWorkflowsList = Object.values(allWorkflows) - - if (allWorkflowsList.length === 0) { + if (!stats) { return { executions: [], aggregateSegments: [], segmentMs: 0 } } - const { start, end } = - logs.length > 0 - ? timeBounds - : { start: new Date(Date.now() - 24 * 60 * 60 * 1000), end: new Date() } - - const totalMs = Math.max(1, end.getTime() - start.getTime()) - const calculatedSegmentMs = Math.max( - MIN_SEGMENT_MS, - Math.floor(totalMs / Math.max(1, segmentCount)) - ) - - const logsByWorkflow = new Map() - for (const log of logs) { - const wfId = log.workflowId - if (!logsByWorkflow.has(wfId)) { - logsByWorkflow.set(wfId, []) - } - logsByWorkflow.get(wfId)!.push(log) - } - - const workflowExecutions: WorkflowExecution[] = [] - - for (const workflow of allWorkflowsList) { - const workflowLogs = logsByWorkflow.get(workflow.id) || [] + const workflowExecutions = stats.workflows.map(toWorkflowExecution) - const segments: WorkflowExecution['segments'] = Array.from( - { length: segmentCount }, - (_, i) => ({ - timestamp: new Date(start.getTime() + i * calculatedSegmentMs).toISOString(), - hasExecutions: false, - totalExecutions: 0, - successfulExecutions: 0, - successRate: 100, - avgDurationMs: 0, - }) - ) - - const durations: number[][] = Array.from({ length: segmentCount }, () => []) - - for (const log of workflowLogs) { - const logTime = new Date(log.createdAt).getTime() - const idx = Math.min( - segmentCount - 1, - Math.max(0, Math.floor((logTime - start.getTime()) / calculatedSegmentMs)) - ) - - segments[idx].totalExecutions += 1 - segments[idx].hasExecutions = true - - if (log.level !== 'error') { - segments[idx].successfulExecutions += 1 - } - - const duration = parseDuration({ duration: log.duration ?? undefined }) - if (duration !== null && duration > 0) { - durations[idx].push(duration) - } - } - - let totalExecs = 0 - let totalSuccess = 0 - - for (let i = 0; i < segmentCount; i++) { - const seg = segments[i] - totalExecs += seg.totalExecutions - totalSuccess += seg.successfulExecutions - - if (seg.totalExecutions > 0) { - seg.successRate = (seg.successfulExecutions / seg.totalExecutions) * 100 - } - - if (durations[i].length > 0) { - seg.avgDurationMs = Math.round( - durations[i].reduce((sum, d) => sum + d, 0) / durations[i].length - ) - } - } - - const overallSuccessRate = totalExecs > 0 ? 
(totalSuccess / totalExecs) * 100 : 100 - - workflowExecutions.push({ - workflowId: workflow.id, - workflowName: workflow.name, - segments, - overallSuccessRate, - }) + return { + executions: workflowExecutions, + aggregateSegments: stats.aggregateSegments, + segmentMs: stats.segmentMs, } + }, [stats]) - workflowExecutions.sort((a, b) => { - const errA = a.overallSuccessRate < 100 ? 1 - a.overallSuccessRate / 100 : 0 - const errB = b.overallSuccessRate < 100 ? 1 - b.overallSuccessRate / 100 : 0 - if (errA !== errB) return errB - errA - return a.workflowName.localeCompare(b.workflowName) - }) - - const aggSegments: { - timestamp: string - totalExecutions: number - successfulExecutions: number - avgDurationMs: number - }[] = Array.from({ length: segmentCount }, (_, i) => ({ - timestamp: new Date(start.getTime() + i * calculatedSegmentMs).toISOString(), - totalExecutions: 0, - successfulExecutions: 0, - avgDurationMs: 0, - })) - - const weightedDurationSums: number[] = Array(segmentCount).fill(0) - const executionCounts: number[] = Array(segmentCount).fill(0) - - for (const wf of workflowExecutions) { - wf.segments.forEach((s, i) => { - aggSegments[i].totalExecutions += s.totalExecutions - aggSegments[i].successfulExecutions += s.successfulExecutions - - if (s.avgDurationMs && s.avgDurationMs > 0 && s.totalExecutions > 0) { - weightedDurationSums[i] += s.avgDurationMs * s.totalExecutions - executionCounts[i] += s.totalExecutions + const lastExecutionByWorkflow = useMemo(() => { + const map = new Map() + for (const wf of executions) { + for (let i = wf.segments.length - 1; i >= 0; i--) { + if (wf.segments[i].totalExecutions > 0) { + map.set(wf.workflowId, new Date(wf.segments[i].timestamp).getTime()) + break } - }) - } - - aggSegments.forEach((seg, i) => { - if (executionCounts[i] > 0) { - seg.avgDurationMs = weightedDurationSums[i] / executionCounts[i] } - }) - - return { - executions: workflowExecutions, - aggregateSegments: aggSegments, - segmentMs: calculatedSegmentMs, } - }, [logs, timeBounds, segmentCount, allWorkflows]) + return map + }, [executions]) const filteredExecutions = useMemo(() => { let filtered = executions @@ -511,37 +376,12 @@ export default function Dashboard({ logs, isLoading, error }: DashboardProps) { useEffect(() => { setSelectedSegments({}) setLastAnchorIndices({}) - }, [logs, timeRange, workflowIds, searchQuery]) - - useEffect(() => { - if (!barsAreaRef.current) return - const el = barsAreaRef.current - let debounceId: ReturnType | null = null - const ro = new ResizeObserver(([entry]) => { - const w = entry?.contentRect?.width || 720 - const n = Math.max(36, Math.min(96, Math.floor(w / MIN_SEGMENT_PX))) - if (debounceId) clearTimeout(debounceId) - debounceId = setTimeout(() => { - setSegmentCount(n) - }, 150) - }) - ro.observe(el) - const rect = el.getBoundingClientRect() - if (rect?.width) { - const n = Math.max(36, Math.min(96, Math.floor(rect.width / MIN_SEGMENT_PX))) - setSegmentCount(n) - } - return () => { - if (debounceId) clearTimeout(debounceId) - ro.disconnect() - } - }, []) + }, [stats, timeRange, workflowIds, searchQuery]) if (isLoading) { return } - // Show error state if (error) { return (
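A quick sanity-check sketch of the `toWorkflowExecution` mapping added above, assuming the module-local helper were exported for the sake of the example; the sample literal is illustrative, not taken from real data:

import type { WorkflowStats } from '@/hooks/queries/logs'

// Illustrative input; only the shape matches the exported WorkflowStats type.
const sample: WorkflowStats = {
  workflowId: 'wf-1',
  workflowName: 'Nightly sync',
  overallSuccessRate: 75,
  totalExecutions: 4,
  totalSuccessful: 3,
  segments: [
    { timestamp: '2026-01-14T00:00:00.000Z', totalExecutions: 4, successfulExecutions: 3, avgDurationMs: 1200 },
    { timestamp: '2026-01-14T00:20:00.000Z', totalExecutions: 0, successfulExecutions: 0, avgDurationMs: 0 },
  ],
}

const execution = toWorkflowExecution(sample)
// execution.segments[0] -> hasExecutions: true,  successRate: 75
// execution.segments[1] -> hasExecutions: false, successRate: 100
// Empty segments default to a 100% success rate, matching the server's
// overallSuccessRate fallback for workflows with no executions.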
diff --git a/apps/sim/app/workspace/[workspaceId]/logs/logs.tsx b/apps/sim/app/workspace/[workspaceId]/logs/logs.tsx index a2ea88d338..014c4ad673 100644 --- a/apps/sim/app/workspace/[workspaceId]/logs/logs.tsx +++ b/apps/sim/app/workspace/[workspaceId]/logs/logs.tsx @@ -11,7 +11,7 @@ import { } from '@/lib/logs/filters' import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser' import { useFolders } from '@/hooks/queries/folders' -import { useDashboardLogs, useLogDetail, useLogsList } from '@/hooks/queries/logs' +import { useDashboardStats, useLogDetail, useLogsList } from '@/hooks/queries/logs' import { useDebounce } from '@/hooks/use-debounce' import { useFilterStore } from '@/stores/logs/filters/store' import type { WorkflowLog } from '@/stores/logs/filters/types' @@ -130,7 +130,7 @@ export default function Logs() { [timeRange, startDate, endDate, level, workflowIds, folderIds, triggers, debouncedSearchQuery] ) - const dashboardLogsQuery = useDashboardLogs(workspaceId, dashboardFilters, { + const dashboardStatsQuery = useDashboardStats(workspaceId, dashboardFilters, { enabled: Boolean(workspaceId) && isInitialized.current, refetchInterval: isLive ? 5000 : false, }) @@ -417,9 +417,9 @@ export default function Logs() { className={cn('flex min-h-0 flex-1 flex-col pr-[24px]', !isDashboardView && 'hidden')} >
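The route buckets timestamps with `FLOOR(EXTRACT(EPOCH FROM (startedAt - start)) * 1000 / segmentMs)` and clamps the index on the JS side; a standalone sketch of that scheme (the helper names here are mine, not part of the patch):

// The window [startMs, endMs] is split into segmentCount buckets of at least
// one minute, mirroring the MIN_SEGMENT_MS constant this PR removes from the client.
function segmentMsFor(startMs: number, endMs: number, segmentCount: number): number {
  const totalMs = Math.max(1, endMs - startMs)
  return Math.max(60_000, Math.floor(totalMs / segmentCount))
}

// Every timestamp lands in [0, segmentCount - 1], so stragglers at the window
// edges fold into the first or last bucket instead of going out of range.
function segmentIndexFor(tMs: number, startMs: number, segmentMs: number, segmentCount: number): number {
  const raw = Math.floor((tMs - startMs) / segmentMs)
  return Math.min(segmentCount - 1, Math.max(0, raw))
}

// Example: a 24h window with the default 72 segments gives 20-minute buckets,
// and an execution at minute 61 falls in bucket 3.
const segMs = segmentMsFor(0, 24 * 60 * 60 * 1000, 72) // 1_200_000 ms
const idx = segmentIndexFor(61 * 60 * 1000, 0, segMs, 72) // 3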
diff --git a/apps/sim/blocks/blocks/langsmith.ts b/apps/sim/blocks/blocks/langsmith.ts index 69836fff0d..519f22c6a1 100644 --- a/apps/sim/blocks/blocks/langsmith.ts +++ b/apps/sim/blocks/blocks/langsmith.ts @@ -199,7 +199,6 @@ export const LangsmithBlock: BlockConfig = { switch (params.operation) { case 'create_runs_batch': return 'langsmith_create_runs_batch' - case 'create_run': default: return 'langsmith_create_run' } diff --git a/apps/sim/hooks/queries/logs.ts b/apps/sim/hooks/queries/logs.ts index 9325ae0992..c7a916c4d8 100644 --- a/apps/sim/hooks/queries/logs.ts +++ b/apps/sim/hooks/queries/logs.ts @@ -1,8 +1,15 @@ import { keepPreviousData, useInfiniteQuery, useQuery } from '@tanstack/react-query' import { getEndDateFromTimeRange, getStartDateFromTimeRange } from '@/lib/logs/filters' import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser' +import type { + DashboardStatsResponse, + SegmentStats, + WorkflowStats, +} from '@/app/api/logs/stats/route' import type { LogsResponse, TimeRange, WorkflowLog } from '@/stores/logs/filters/types' +export type { DashboardStatsResponse, SegmentStats, WorkflowStats } + export const logKeys = { all: ['logs'] as const, lists: () => [...logKeys.all, 'list'] as const, @@ -10,8 +17,8 @@ export const logKeys = { [...logKeys.lists(), workspaceId ?? '', filters] as const, details: () => [...logKeys.all, 'detail'] as const, detail: (logId: string | undefined) => [...logKeys.details(), logId ?? ''] as const, - dashboard: (workspaceId: string | undefined, filters: Record) => - [...logKeys.all, 'dashboard', workspaceId ?? '', filters] as const, + stats: (workspaceId: string | undefined, filters: object) => + [...logKeys.all, 'stats', workspaceId ?? '', filters] as const, executionSnapshots: () => [...logKeys.all, 'executionSnapshot'] as const, executionSnapshot: (executionId: string | undefined) => [...logKeys.executionSnapshots(), executionId ?? ''] as const, @@ -147,52 +154,96 @@ export function useLogDetail(logId: string | undefined) { }) } -const DASHBOARD_LOGS_LIMIT = 10000 +interface DashboardFilters { + timeRange: TimeRange + startDate?: string + endDate?: string + level: string + workflowIds: string[] + folderIds: string[] + triggers: string[] + searchQuery: string + segmentCount?: number +} /** - * Fetches all logs for dashboard metrics (non-paginated). - * Uses same filters as the logs list but with a high limit to get all data. + * Fetches aggregated dashboard statistics from the server. + * Uses SQL aggregation for efficient computation without row limits. 
 */
-async function fetchAllLogs(
+async function fetchDashboardStats(
   workspaceId: string,
-  filters: Omit
-): Promise<WorkflowLog[]> {
+  filters: DashboardFilters
+): Promise<DashboardStatsResponse> {
   const params = new URLSearchParams()
   params.set('workspaceId', workspaceId)
-  params.set('limit', DASHBOARD_LOGS_LIMIT.toString())
-  params.set('offset', '0')
-  applyFilterParams(params, filters)
+  if (filters.segmentCount) {
+    params.set('segmentCount', filters.segmentCount.toString())
+  }
+
+  if (filters.level !== 'all') {
+    params.set('level', filters.level)
+  }
+
+  if (filters.triggers.length > 0) {
+    params.set('triggers', filters.triggers.join(','))
+  }
+
+  if (filters.workflowIds.length > 0) {
+    params.set('workflowIds', filters.workflowIds.join(','))
+  }
 
-  const response = await fetch(`/api/logs?${params.toString()}`)
+  if (filters.folderIds.length > 0) {
+    params.set('folderIds', filters.folderIds.join(','))
+  }
+
+  const startDate = getStartDateFromTimeRange(filters.timeRange, filters.startDate)
+  if (startDate) {
+    params.set('startDate', startDate.toISOString())
+  }
+
+  const endDate = getEndDateFromTimeRange(filters.timeRange, filters.endDate)
+  if (endDate) {
+    params.set('endDate', endDate.toISOString())
+  }
+
+  if (filters.searchQuery.trim()) {
+    const parsedQuery = parseQuery(filters.searchQuery.trim())
+    const searchParams = queryToApiParams(parsedQuery)
+
+    for (const [key, value] of Object.entries(searchParams)) {
+      params.set(key, value)
+    }
+  }
+
+  const response = await fetch(`/api/logs/stats?${params.toString()}`)
 
   if (!response.ok) {
-    throw new Error('Failed to fetch logs for dashboard')
+    throw new Error('Failed to fetch dashboard stats')
  }
 
-  const apiData: LogsResponse = await response.json()
-  return apiData.data || []
+  return response.json()
 }
 
-interface UseDashboardLogsOptions {
+interface UseDashboardStatsOptions {
   enabled?: boolean
   refetchInterval?: number | false
 }
 
 /**
- * Hook for fetching all logs for dashboard metrics.
- * Unlike useLogsList, this fetches all logs in a single request
- * to ensure dashboard metrics are computed from complete data.
+ * Hook for fetching aggregated dashboard statistics.
+ * Uses server-side SQL aggregation for efficient computation
+ * without any row limits - all matching logs are included in the stats.
  */
-export function useDashboardLogs(
+export function useDashboardStats(
   workspaceId: string | undefined,
-  filters: Omit,
-  options?: UseDashboardLogsOptions
+  filters: DashboardFilters,
+  options?: UseDashboardStatsOptions
 ) {
   return useQuery({
-    queryKey: logKeys.dashboard(workspaceId, filters),
-    queryFn: () => fetchAllLogs(workspaceId as string, filters),
+    queryKey: logKeys.stats(workspaceId, filters),
+    queryFn: () => fetchDashboardStats(workspaceId as string, filters),
     enabled: Boolean(workspaceId) && (options?.enabled ?? true),
     refetchInterval: options?.refetchInterval ?? false,
     staleTime: 0,

From 8fbb7954a04562ebcfe7bf1cfbb0a27af48a2e42 Mon Sep 17 00:00:00 2001
From: Waleed Latif
Date: Wed, 14 Jan 2026 18:37:03 -0800
Subject: [PATCH 2/2] fix(dashboard): use a count-weighted average when merging
 segment rows

---
 apps/sim/app/api/logs/stats/route.ts | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/apps/sim/app/api/logs/stats/route.ts b/apps/sim/app/api/logs/stats/route.ts
index d1e1fe2820..686b10d50c 100644
--- a/apps/sim/app/api/logs/stats/route.ts
+++ b/apps/sim/app/api/logs/stats/route.ts
@@ -169,9 +169,16 @@ export async function GET(request: NextRequest) {
 
       const existing = wf.segments.get(segmentIndex)
       if (existing) {
-        existing.totalExecutions += Number(row.totalExecutions)
+        const oldTotal = existing.totalExecutions
+        const newTotal = oldTotal + Number(row.totalExecutions)
+        existing.totalExecutions = newTotal
         existing.successfulExecutions += Number(row.successfulExecutions)
-        existing.avgDurationMs = (existing.avgDurationMs + Number(row.avgDurationMs || 0)) / 2
+        existing.avgDurationMs =
+          newTotal > 0
+            ? (existing.avgDurationMs * oldTotal +
+                Number(row.avgDurationMs || 0) * Number(row.totalExecutions)) /
+              newTotal
+            : 0
       } else {
         wf.segments.set(segmentIndex, {
           timestamp: new Date(startTime.getTime() + segmentIndex * segmentMs).toISOString(),
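For context on PATCH 2/2: the original merge step averaged the incoming row's average against the running average, giving every row 50% weight regardless of how many executions it represented. A small worked sketch of the difference, with illustrative numbers:

// Merging two partial rows that land in the same segment:
// row A covers 9 runs averaging 100ms, row B covers 1 run averaging 1000ms.
const a = { count: 9, avgMs: 100 }
const b = { count: 1, avgMs: 1000 }

// Before this patch: midpoint of the two averages.
const midpoint = (a.avgMs + b.avgMs) / 2 // 550ms, overweights the single slow run

// After: each average is weighted by its execution count, which reproduces
// the true mean across all runs in the segment.
const total = a.count + b.count
const weighted = (a.avgMs * a.count + b.avgMs * b.count) / total // 190ms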