Skip to content
Merged
Show file tree
Hide file tree
Changes from 8 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
64 changes: 19 additions & 45 deletions apps/webapp/app/presenters/v3/LogsListPresenter.server.ts
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🚩 Partition pruning with inserted_at is ineffective on the new search table

The LogsListPresenter applies inserted_at >= ... and inserted_at <= ... filters (lines 269-271, 282-284). On the old task_events_v2 table, these were effective for partition pruning since it used PARTITION BY toDate(inserted_at) (010_add_task_events_v2.sql:48). However, the new task_events_search_v1 table uses PARTITION BY toDate(triggered_timestamp) (016_add_task_events_search_v1.sql:26). The inserted_at filter will still execute correctly but won't help prune partitions, making it dead weight. For effective partition pruning, the time range filter should target triggered_timestamp instead. The start_time filters that are also applied are close to triggered_timestamp (since triggered_timestamp = start_time + duration) but won't trigger partition pruning either since they reference a different column. This won't cause incorrect results but may degrade query performance for large datasets.

(Refers to lines 266-289)

Open in Devin Review

Was this helpful? React with 👍 or 👎 to provide feedback.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🚩 CANCELLED status SPANs are unreachable by any level filter

The excluded_statuses parameter at LogsListPresenter.server.ts:300 always excludes ['ERROR', 'CANCELLED'] from the kinds branch of every level. This means:

  • INFO filter: kind IN ('LOG_INFO', 'LOG_LOG', 'SPAN') AND status NOT IN ('ERROR', 'CANCELLED') — excludes CANCELLED SPANs
  • ERROR filter: status IN ('ERROR') — doesn't match CANCELLED
  • No other level claims them

kindToLevel('SPAN', 'CANCELLED') at logUtils.ts:73-94 returns INFO (since CANCELLED isn't checked before the switch). So a SPAN with CANCELLED status is classified as INFO but excluded from the INFO filter. These rows would appear in the unfiltered view but become invisible once any level filter is applied. This may be acceptable since CANCELLED is a terminal state not typically surfaced in logs, but it's an inconsistency worth being aware of.

(Refers to lines 298-301)

Open in Devin Review

Was this helpful? React with 👍 or 👎 to provide feedback.

Copy link
Contributor Author

@mpcgrid mpcgrid Feb 12, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This will be figured out in the future after some alpha testing in prod. This status is rarely used, and we might not want to filter by it at all.

Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,6 @@ export type LogsListOptions = {
retentionLimitDays?: number;
// search
search?: string;
includeDebugLogs?: boolean;
// pagination
direction?: Direction;
cursor?: string;
Expand All @@ -69,7 +68,6 @@ export const LogsListOptionsSchema = z.object({
defaultPeriod: z.string().optional(),
retentionLimitDays: z.number().int().positive().optional(),
search: z.string().max(1000).optional(),
includeDebugLogs: z.boolean().optional(),
direction: z.enum(["forward", "backward"]).optional(),
cursor: z.string().optional(),
pageSize: z.number().int().positive().max(1000).optional(),
Expand All @@ -83,15 +81,17 @@ export type LogsListAppliedFilters = LogsList["filters"];

// Cursor is a base64 encoded JSON of the pagination keys
type LogCursor = {
organizationId: string;
environmentId: string;
unixTimestamp: number;
traceId: string;
triggeredTimestamp: string; // DateTime64(9) string
spanId: string;
};

const LogCursorSchema = z.object({
organizationId: z.string(),
environmentId: z.string(),
unixTimestamp: z.number(),
traceId: z.string(),
triggeredTimestamp: z.string(),
spanId: z.string(),
});

function encodeCursor(cursor: LogCursor): string {
Expand All @@ -116,7 +116,7 @@ function decodeCursor(cursor: string): LogCursor | null {
function levelToKindsAndStatuses(level: LogLevel): { kinds?: string[]; statuses?: string[] } {
switch (level) {
case "DEBUG":
return { kinds: ["DEBUG_EVENT", "LOG_DEBUG"] };
return { kinds: ["LOG_DEBUG"] };
case "INFO":
return { kinds: ["LOG_INFO", "LOG_LOG"] };
case "WARN":
Expand Down Expand Up @@ -166,7 +166,6 @@ export class LogsListPresenter extends BasePresenter {
to,
cursor,
pageSize = DEFAULT_PAGE_SIZE,
includeDebugLogs = true,
defaultPeriod,
retentionLimitDays,
}: LogsListOptions
Expand Down Expand Up @@ -252,7 +251,7 @@ export class LogsListPresenter extends BasePresenter {
);
}

const queryBuilder = this.clickhouse.taskEventsV2.logsListQueryBuilder();
const queryBuilder = this.clickhouse.taskEventsSearch.logsListQueryBuilder();

queryBuilder.where("environment_id = {environmentId: String}", {
environmentId,
Expand All @@ -270,10 +269,6 @@ export class LogsListPresenter extends BasePresenter {
queryBuilder.where("inserted_at >= {insertedAtStart: DateTime64(3)}", {
insertedAtStart: convertDateToClickhouseDateTime(effectiveFrom),
});

queryBuilder.where("start_time >= {fromTime: String}", {
fromTime: formatNanosecondsForClickhouse(fromNs),
});
}

if (effectiveTo) {
Expand All @@ -283,10 +278,6 @@ export class LogsListPresenter extends BasePresenter {
queryBuilder.where("inserted_at <= {insertedAtEnd: DateTime64(3)}", {
insertedAtEnd: convertDateToClickhouseDateTime(clampedTo),
});

queryBuilder.where("start_time <= {toTime: String}", {
toTime: formatNanosecondsForClickhouse(toNs),
});
}

// Task filter (applies directly to ClickHouse)
Expand Down Expand Up @@ -349,39 +340,22 @@ export class LogsListPresenter extends BasePresenter {
}
}

// Debug logs are available only to admins
if (includeDebugLogs === false) {
queryBuilder.where("kind NOT IN {debugKinds: Array(String)}", {
debugKinds: ["DEBUG_EVENT"],
});

queryBuilder.where("NOT ((kind = 'LOG_INFO') AND (attributes_text = '{}'))");
}

queryBuilder.where("kind NOT IN {debugSpans: Array(String)}", {
debugSpans: ["SPAN", "ANCESTOR_OVERRIDE", "SPAN_EVENT"],
});

// kindCondition += ` `;
// params["excluded_statuses"] = ["SPAN", "ANCESTOR_OVERRIDE", "SPAN_EVENT"];


queryBuilder.where("NOT (kind = 'SPAN' AND status = 'PARTIAL')");

// Cursor pagination
// Cursor pagination using explicit lexicographic comparison
// Must mirror the ORDER BY columns: (organization_id DESC, environment_id DESC, triggered_timestamp DESC, span_id DESC)
const decodedCursor = cursor ? decodeCursor(cursor) : null;
if (decodedCursor) {
// organization_id and environment_id are already pinned by equality filters above,
// so cursor only needs to compare triggered_timestamp and span_id
queryBuilder.where(
"(environment_id, toUnixTimestamp(start_time), trace_id) < ({cursorEnvId: String}, {cursorUnixTimestamp: Int64}, {cursorTraceId: String})",
`(triggered_timestamp < {cursorTriggeredTimestamp: String} OR (triggered_timestamp = {cursorTriggeredTimestamp: String} AND span_id < {cursorSpanId: String}))`,
{
cursorEnvId: decodedCursor.environmentId,
cursorUnixTimestamp: decodedCursor.unixTimestamp,
cursorTraceId: decodedCursor.traceId,
cursorTriggeredTimestamp: decodedCursor.triggeredTimestamp,
cursorSpanId: decodedCursor.spanId,
}
);
}

queryBuilder.orderBy("environment_id DESC, toUnixTimestamp(start_time) DESC, trace_id DESC");
queryBuilder.orderBy("triggered_timestamp DESC, span_id DESC");
// Limit + 1 to check if there are more results
queryBuilder.limit(pageSize + 1);

Expand All @@ -399,11 +373,11 @@ export class LogsListPresenter extends BasePresenter {
let nextCursor: string | undefined;
if (hasMore && logs.length > 0) {
const lastLog = logs[logs.length - 1];
const unixTimestamp = Math.floor(new Date(lastLog.start_time).getTime() / 1000);
nextCursor = encodeCursor({
organizationId,
environmentId,
unixTimestamp,
traceId: lastLog.trace_id,
triggeredTimestamp: lastLog.triggered_timestamp,
spanId: lastLog.span_id,
});
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,6 @@ import {
ResizablePanel,
ResizablePanelGroup,
} from "~/components/primitives/Resizable";
import { Switch } from "~/components/primitives/Switch";
import { Button } from "~/components/primitives/Buttons";
import { FEATURE_FLAG, validateFeatureFlagValue } from "~/v3/featureFlags.server";

Expand Down Expand Up @@ -95,7 +94,6 @@ async function hasLogsPageAccess(
export const loader = async ({ request, params }: LoaderFunctionArgs) => {
const user = await requireUser(request);
const userId = user.id;
const isAdmin = user.admin || user.isImpersonating;

const { projectParam, organizationSlug, envParam } = EnvironmentParamSchema.parse(params);

Expand Down Expand Up @@ -126,7 +124,6 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => {
const runId = url.searchParams.get("runId") ?? undefined;
const search = url.searchParams.get("search") ?? undefined;
const levels = parseLevelsFromUrl(url);
const showDebug = url.searchParams.get("showDebug") === "true";
const period = url.searchParams.get("period") ?? undefined;
const fromStr = url.searchParams.get("from");
const toStr = url.searchParams.get("to");
Expand All @@ -150,7 +147,6 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => {
period,
from,
to,
includeDebugLogs: isAdmin && showDebug,
defaultPeriod: "1h",
retentionLimitDays
})
Expand All @@ -163,15 +159,13 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => {

return typeddefer({
data: listPromise,
isAdmin,
showDebug,
defaultPeriod: "1h",
retentionLimitDays,
});
};

export default function Page() {
const { data, isAdmin, showDebug, defaultPeriod, retentionLimitDays } =
const { data, defaultPeriod, retentionLimitDays } =
useTypedLoaderData<typeof loader>();

return (
Expand Down Expand Up @@ -199,8 +193,6 @@ export default function Page() {
errorElement={
<div className="grid h-full max-h-full grid-rows-[2.5rem_auto_1fr] overflow-hidden">
<FiltersBar
isAdmin={isAdmin}
showDebug={showDebug}
defaultPeriod={defaultPeriod}
retentionLimitDays={retentionLimitDays}
/>
Expand All @@ -218,8 +210,6 @@ export default function Page() {
return (
<div className="grid h-full max-h-full grid-rows-[2.5rem_auto_1fr] overflow-hidden">
<FiltersBar
isAdmin={isAdmin}
showDebug={showDebug}
defaultPeriod={defaultPeriod}
retentionLimitDays={retentionLimitDays}
/>
Expand All @@ -235,15 +225,11 @@ export default function Page() {
<div className="grid h-full max-h-full grid-rows-[2.5rem_1fr] overflow-hidden">
<FiltersBar
list={result}
isAdmin={isAdmin}
showDebug={showDebug}
defaultPeriod={defaultPeriod}
retentionLimitDays={retentionLimitDays}
/>
<LogsList
list={result}
isAdmin={isAdmin}
showDebug={showDebug}
defaultPeriod={defaultPeriod}
/>
</div>
Expand All @@ -258,14 +244,10 @@ export default function Page() {

function FiltersBar({
list,
isAdmin,
showDebug,
defaultPeriod,
retentionLimitDays,
}: {
list?: Exclude<Awaited<UseDataFunctionReturn<typeof loader>["data"]>, { error: string }>;
isAdmin: boolean;
showDebug: boolean;
defaultPeriod?: string;
retentionLimitDays: number;
}) {
Expand All @@ -280,16 +262,6 @@ function FiltersBar({
searchParams.has("from") ||
searchParams.has("to");

const handleDebugToggle = useCallback((checked: boolean) => {
const url = new URL(window.location.href);
if (checked) {
url.searchParams.set("showDebug", "true");
} else {
url.searchParams.delete("showDebug");
}
window.location.href = url.toString();
}, []);

return (
<div className="flex items-start justify-between gap-x-2 border-b border-grid-bright p-2">
<div className="flex flex-row flex-wrap items-center gap-1">
Expand Down Expand Up @@ -329,16 +301,6 @@ function FiltersBar({
</>
)}
</div>
<div className="flex items-center gap-2">
{isAdmin && (
<Switch
variant="small"
label="Debug"
checked={showDebug}
onCheckedChange={handleDebugToggle}
/>
)}
</div>
</div>
);
}
Expand All @@ -347,8 +309,6 @@ function LogsList({
list,
}: {
list: Exclude<Awaited<UseDataFunctionReturn<typeof loader>["data"]>, { error: string }>; //exclude error, it is handled
isAdmin: boolean;
showDebug: boolean;
defaultPeriod?: string;
}) {
const navigation = useNavigation();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,13 @@ import { EnvironmentParamSchema } from "~/utils/pathBuilder";
import { findProjectBySlug } from "~/models/project.server";
import { findEnvironmentBySlug } from "~/models/runtimeEnvironment.server";
import { clickhouseClient } from "~/services/clickhouseInstance.server";
import { convertClickhouseDateTime64ToJsDate } from "~/v3/eventRepository/clickhouseEventRepository.server";

// Convert ClickHouse kind to display level
function kindToLevel(
kind: string
): "TRACE" | "DEBUG" | "INFO" | "WARN" | "ERROR" | "LOG" {
switch (kind) {
case "DEBUG_EVENT":
case "LOG_DEBUG":
return "DEBUG";
case "LOG_INFO":
Expand All @@ -23,7 +23,6 @@ function kindToLevel(
case "LOG_LOG":
return "LOG";
case "SPAN":
case "ANCESTOR_OVERRIDE":
case "SPAN_EVENT":
default:
return "TRACE";
Expand Down Expand Up @@ -63,8 +62,11 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => {
}

// Query ClickHouse for related spans in the same trace
const queryBuilder = clickhouseClient.taskEventsV2.logsListQueryBuilder();
const queryBuilder = clickhouseClient.taskEventsSearch.logsListQueryBuilder();

queryBuilder.where("organization_id = {organizationId: String}", {
organizationId: project.organizationId,
});
queryBuilder.where("environment_id = {environmentId: String}", {
environmentId: environment.id,
});
Expand All @@ -91,7 +93,7 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => {
kind: row.kind,
level: kindToLevel(row.kind),
status: row.status,
startTime: new Date(Number(row.start_time) / 1_000_000).toISOString(),
startTime: convertClickhouseDateTime64ToJsDate(row.start_time).toISOString(),
duration: Number(row.duration),
isCurrent: row.span_id === currentSpanId,
}));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@ function parseLevelsFromUrl(url: URL): LogLevel[] | undefined {
export const loader = async ({ request, params }: LoaderFunctionArgs) => {
const user = await requireUser(request);
const userId = user.id;
const isAdmin = user?.admin || user?.isImpersonating;

const { projectParam, organizationSlug, envParam } = EnvironmentParamSchema.parse(params);

Expand All @@ -46,7 +45,6 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => {
const search = url.searchParams.get("search") ?? undefined;
const cursor = url.searchParams.get("cursor") ?? undefined;
const levels = parseLevelsFromUrl(url);
const showDebug = url.searchParams.get("showDebug") === "true";
const period = url.searchParams.get("period") ?? undefined;
const fromStr = url.searchParams.get("from");
const toStr = url.searchParams.get("to");
Expand All @@ -67,7 +65,6 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => {
from,
to,
levels,
includeDebugLogs: isAdmin && showDebug,
defaultPeriod: "1h",
retentionLimitDays,
}) as any; // Validated by LogsListOptionsSchema at runtime
Expand Down
Loading
Loading