mirror of
https://github.com/supabase/supabase.git
synced 2026-05-06 08:56:46 -04:00
0433eeb5f5
Mark provenance of SQL via the branded types SafeSqlFragment and UntrustedSqlFragment. Only SafeSqlFragment should be executed; UntrustedSqlFragments require explicit user approval (the SQL is shown on screen and the user has to click something) before they are promoted to SafeSqlFragment. <!-- This is an auto-generated comment: release notes by coderabbit.ai --> ## Summary by CodeRabbit * **New Features** * Editor and RLS tester show loading states for inferred/generated SQL and include a dedicated user SQL editor for safer edits. * **Refactor** * Platform-wide SQL handling tightened: snippets and AI-generated SQL are treated as untrusted/display-only until promoted, improving safety and consistency. <!-- end of auto-generated comment: release notes by coderabbit.ai -->
211 lines
7.0 KiB
TypeScript
import { type SafeSqlFragment } from '@supabase/pg-meta'
|
|
|
|
import type { QueryPerformanceRow } from '../../QueryPerformance/QueryPerformance.types'
|
|
import type { Logs } from '../../Settings/Logs/Logs.types'
|
|
import {
|
|
SCHEMA_INTROSPECTION_REGEX,
|
|
SUPAMONITOR_EXCLUDED_APP_NAMES,
|
|
SUPAMONITOR_EXCLUDED_ROLES,
|
|
TRANSACTION_CONTROL_REGEX,
|
|
} from '../QueryInsights.constants'
|
|
import type { ChartDataPoint, ParsedLogEntry } from '../QueryInsights.types'
|
|
|
|
export function filterSystemLogs(
|
|
logs: ParsedLogEntry[],
|
|
{ includeIntrospection = false }: { includeIntrospection?: boolean } = {}
|
|
): ParsedLogEntry[] {
|
|
return logs.filter((log) => {
|
|
if (log.user_name && (SUPAMONITOR_EXCLUDED_ROLES as readonly string[]).includes(log.user_name))
|
|
return false
|
|
if (
|
|
log.application_name &&
|
|
(SUPAMONITOR_EXCLUDED_APP_NAMES as readonly string[]).includes(log.application_name)
|
|
)
|
|
return false
|
|
if (log.query && TRANSACTION_CONTROL_REGEX.test(log.query)) return false
|
|
if (!includeIntrospection && log.query && SCHEMA_INTROSPECTION_REGEX.test(log.query))
|
|
return false
|
|
return true
|
|
})
|
|
}
|
|
|
|
function asString(unknown: unknown): string | undefined {
|
|
if (typeof unknown === 'string') return unknown
|
|
if (unknown === null || unknown === undefined) return undefined
|
|
return String(unknown)
|
|
}
|
|
|
|
function asNumber(unknown: unknown): number | undefined {
|
|
if (typeof unknown === 'number') return unknown
|
|
if (unknown === null || unknown === undefined) return undefined
|
|
const parsed = Number(unknown)
|
|
return Number.isNaN(parsed) ? undefined : parsed
|
|
}
|
|
|
|
export function parseSupamonitorLogs(logData: Logs['result']): ParsedLogEntry[] {
|
|
if (!logData || logData.length === 0) return []
|
|
|
|
return logData.map((log) => ({
|
|
timestamp: asString(log.timestamp),
|
|
application_name: asString(log.application_name),
|
|
calls: asNumber(log.calls),
|
|
database_name: asString(log.database_name),
|
|
query: log.query,
|
|
query_id: asNumber(log.query_id),
|
|
total_exec_time: asNumber(log.total_exec_time),
|
|
total_plan_time: asNumber(log.total_plan_time),
|
|
user_name: asString(log.user_name),
|
|
mean_exec_time: asNumber(log.mean_exec_time),
|
|
mean_plan_time: asNumber(log.mean_plan_time),
|
|
min_exec_time: asNumber(log.min_exec_time),
|
|
max_exec_time: asNumber(log.max_exec_time),
|
|
min_plan_time: asNumber(log.min_plan_time),
|
|
max_plan_time: asNumber(log.max_plan_time),
|
|
p50_exec_time: asNumber(log.p50_exec_time),
|
|
p95_exec_time: asNumber(log.p95_exec_time),
|
|
p50_plan_time: asNumber(log.p50_plan_time),
|
|
p95_plan_time: asNumber(log.p95_plan_time),
|
|
}))
|
|
}
|
|
|
|
export function transformLogsToChartData(parsedLogs: ParsedLogEntry[]): ChartDataPoint[] {
|
|
if (!parsedLogs || parsedLogs.length === 0) return []
|
|
|
|
return parsedLogs
|
|
.map((log: ParsedLogEntry) => {
|
|
if (!log.timestamp) return null
|
|
|
|
const periodStart = new Date(log.timestamp).getTime()
|
|
if (isNaN(periodStart)) return null
|
|
|
|
const meanExecTime = parseFloat(String(log.mean_exec_time ?? 0))
|
|
const meanPlanTime = parseFloat(String(log.mean_plan_time ?? 0))
|
|
const calls = parseInt(String(log.calls ?? 0), 10)
|
|
|
|
return {
|
|
period_start: periodStart,
|
|
timestamp: log.timestamp,
|
|
query_latency: meanExecTime + meanPlanTime,
|
|
mean_time: meanExecTime,
|
|
min_time: (log.min_exec_time ?? 0) + (log.min_plan_time ?? 0),
|
|
max_time: (log.max_exec_time ?? 0) + (log.max_plan_time ?? 0),
|
|
stddev_time: 0,
|
|
p50_time: (log.p50_exec_time ?? 0) + (log.p50_plan_time ?? 0),
|
|
p95_time: (log.p95_exec_time ?? 0) + (log.p95_plan_time ?? 0),
|
|
rows_read: 0,
|
|
calls,
|
|
cache_hits: 0,
|
|
cache_misses: 0,
|
|
}
|
|
})
|
|
.filter((item): item is NonNullable<typeof item> => item !== null)
|
|
.sort((a, b) => a.period_start - b.period_start)
|
|
}
|
|
|
|
function normalizeQuery(query: string): string {
|
|
return query.replace(/\s+/g, ' ').trim()
|
|
}
|
|
|
|
export function aggregateLogsByQuery(parsedLogs: ParsedLogEntry[]): QueryPerformanceRow[] {
|
|
if (!parsedLogs || parsedLogs.length === 0) return []
|
|
|
|
const queryGroups = new Map<string, ParsedLogEntry[]>()
|
|
|
|
parsedLogs.forEach((log) => {
|
|
const query = normalizeQuery(log.query || '')
|
|
if (!query) return
|
|
|
|
if (!queryGroups.has(query)) {
|
|
queryGroups.set(query, [])
|
|
}
|
|
queryGroups.get(query)!.push(log)
|
|
})
|
|
|
|
const aggregatedData: QueryPerformanceRow[] = []
|
|
let totalExecutionTime = 0
|
|
|
|
const queryStats = Array.from(queryGroups.entries()).map(([query, logs]) => {
|
|
const count = logs.length
|
|
let totalCalls = 0
|
|
let totalExecTime = 0
|
|
let totalPlanTime = 0
|
|
let p95Sum = 0
|
|
let p95Count = 0
|
|
let minTime = Infinity
|
|
let maxTime = -Infinity
|
|
const rolname = logs[0]?.user_name || ''
|
|
const applicationName = logs[0]?.application_name || ''
|
|
let firstSeen = logs[0]?.timestamp ?? ''
|
|
|
|
logs.forEach((log) => {
|
|
if (log.timestamp && (!firstSeen || log.timestamp < firstSeen)) firstSeen = log.timestamp
|
|
const logCalls = parseInt(String(log.calls ?? 0), 10)
|
|
totalCalls += logCalls
|
|
totalExecTime += parseFloat(String(log.total_exec_time ?? 0))
|
|
totalPlanTime += parseFloat(String(log.total_plan_time ?? 0))
|
|
const logP95 =
|
|
parseFloat(String(log.p95_exec_time ?? 0)) + parseFloat(String(log.p95_plan_time ?? 0))
|
|
if (logP95 > 0) {
|
|
p95Sum += logP95
|
|
p95Count++
|
|
}
|
|
minTime = Math.min(
|
|
minTime,
|
|
parseFloat(String(log.min_exec_time ?? 0)) + parseFloat(String(log.min_plan_time ?? 0))
|
|
)
|
|
maxTime = Math.max(
|
|
maxTime,
|
|
parseFloat(String(log.max_exec_time ?? 0)) + parseFloat(String(log.max_plan_time ?? 0))
|
|
)
|
|
})
|
|
|
|
const totalTime = totalExecTime + totalPlanTime
|
|
const avgMeanTime = totalCalls > 0 ? totalTime / totalCalls : 0
|
|
const avgP95Time = p95Count > 0 ? p95Sum / p95Count : 0
|
|
const finalMinTime = minTime === Infinity ? 0 : minTime
|
|
const finalMaxTime = maxTime === -Infinity ? 0 : maxTime
|
|
|
|
totalExecutionTime += totalTime
|
|
|
|
return {
|
|
query,
|
|
rolname,
|
|
applicationName,
|
|
firstSeen,
|
|
count,
|
|
avgMeanTime,
|
|
avgP95Time,
|
|
minTime: finalMinTime,
|
|
maxTime: finalMaxTime,
|
|
totalCalls,
|
|
totalTime,
|
|
}
|
|
})
|
|
|
|
queryStats.forEach((stats) => {
|
|
const propTotalTime = totalExecutionTime > 0 ? (stats.totalTime / totalExecutionTime) * 100 : 0
|
|
|
|
aggregatedData.push({
|
|
query: stats.query as SafeSqlFragment,
|
|
rolname: stats.rolname,
|
|
application_name: stats.applicationName,
|
|
calls: stats.totalCalls,
|
|
mean_time: stats.avgMeanTime,
|
|
p95_time: stats.avgP95Time,
|
|
min_time: stats.minTime,
|
|
max_time: stats.maxTime,
|
|
total_time: stats.totalTime,
|
|
rows_read: 0,
|
|
cache_hit_rate: 0,
|
|
prop_total_time: propTotalTime,
|
|
index_advisor_result: null,
|
|
_total_cache_hits: 0,
|
|
_total_cache_misses: 0,
|
|
_count: stats.count,
|
|
first_seen: stats.firstSeen,
|
|
})
|
|
})
|
|
|
|
return aggregatedData.sort((a, b) => b.total_time - a.total_time)
|
|
}
|