diff --git a/apps/dashboard/src/components/pulls/detail/pull-detail-header.tsx b/apps/dashboard/src/components/pulls/detail/pull-detail-header.tsx
index 523ca22..7a79ab5 100644
--- a/apps/dashboard/src/components/pulls/detail/pull-detail-header.tsx
+++ b/apps/dashboard/src/components/pulls/detail/pull-detail-header.tsx
@@ -1,19 +1,14 @@
import { FileIcon, GitCommitIcon, ReviewsIcon } from "@diffkit/icons";
-import { StatePill } from "@diffkit/ui/components/state-pill";
import {
Callout,
CalloutAction,
CalloutContent,
} from "@diffkit/ui/components/callout";
-import {
- Tooltip,
- TooltipContent,
- TooltipTrigger,
-} from "@diffkit/ui/components/tooltip";
+import { StatePill } from "@diffkit/ui/components/state-pill";
import { cn } from "@diffkit/ui/lib/utils";
import { Link } from "@tanstack/react-router";
-import { useCallback, useRef, useState } from "react";
import { DetailPageTitle } from "#/components/details/detail-page";
+import { CopyBadge } from "#/components/shared/copy-badge";
import type { PullDetail } from "#/lib/github.types";
import { getPrStateConfig } from "#/lib/pr-state";
@@ -146,42 +141,6 @@ export function PullDetailHeader({
const DIFF_BOX_COUNT = 5;
-function CopyBadge({
- value,
- canTruncate,
-}: {
- value: string;
- canTruncate?: boolean;
-}) {
- const [copied, setCopied] = useState(false);
-  const timeoutRef = useRef<ReturnType<typeof setTimeout>>(undefined);
-
- const handleClick = useCallback(() => {
- void navigator.clipboard.writeText(value);
- setCopied(true);
- clearTimeout(timeoutRef.current);
- timeoutRef.current = setTimeout(() => setCopied(false), 1500);
- }, [value]);
-
- return (
-
-
-
-
- Copied!
-
- );
-}
-
function DiffBoxes({
additions,
deletions,
diff --git a/apps/dashboard/src/components/repo/repo-activity-cards.tsx b/apps/dashboard/src/components/repo/repo-activity-cards.tsx
index d058fed..7de17ee 100644
--- a/apps/dashboard/src/components/repo/repo-activity-cards.tsx
+++ b/apps/dashboard/src/components/repo/repo-activity-cards.tsx
@@ -1,4 +1,5 @@
import {
+ ActionsIcon,
ChevronRightIcon,
CommentIcon,
GitPullRequestIcon,
@@ -8,18 +9,24 @@ import {
import { cn } from "@diffkit/ui/lib/utils";
import { useQuery } from "@tanstack/react-query";
import { Link } from "@tanstack/react-router";
+import {
+ CheckStateIcon,
+ getCheckState,
+} from "#/components/checks/check-state-icon";
import { formatRelativeTime } from "#/lib/format-relative-time";
import {
type GitHubQueryScope,
githubIssuesFromRepoQueryOptions,
githubPullsFromRepoQueryOptions,
githubRepoDiscussionsQueryOptions,
+ githubWorkflowRunsFromRepoQueryOptions,
} from "#/lib/github.query";
import type {
DiscussionSummary,
IssueSummary,
PullSummary,
RepoOverview,
+ WorkflowRun,
} from "#/lib/github.types";
import { getPrStateConfig } from "#/lib/pr-state";
import { useHasMounted } from "#/lib/use-has-mounted";
@@ -61,6 +68,15 @@ export function RepoActivityCards({
enabled: hasMounted,
});
+ const runsQuery = useQuery({
+ ...githubWorkflowRunsFromRepoQueryOptions(scope, {
+ owner,
+ repo,
+ perPage: 5,
+ }),
+ enabled: hasMounted,
+ });
+
const discussionsQuery = useQuery({
...githubRepoDiscussionsQueryOptions(scope, { owner, repo }),
enabled: hasMounted && !!repoData.hasDiscussions,
@@ -97,6 +113,15 @@ export function RepoActivityCards({
actionHref={`/${owner}/${repo}/issues/new`}
renderItem={(issue) => }
/>
+ (
+
+ )}
+ />
{repoData.hasDiscussions && (
+
+
+
+
+
{run.displayTitle}
+
+ #{run.runNumber} · {formatRelativeTime(run.updatedAt)}
+
+
+
+ );
+}
+
function DiscussionItem({
discussion,
}: {
diff --git a/apps/dashboard/src/components/shared/copy-badge.tsx b/apps/dashboard/src/components/shared/copy-badge.tsx
new file mode 100644
index 0000000..0d079a5
--- /dev/null
+++ b/apps/dashboard/src/components/shared/copy-badge.tsx
@@ -0,0 +1,54 @@
+import {
+ Tooltip,
+ TooltipContent,
+ TooltipTrigger,
+} from "@diffkit/ui/components/tooltip";
+import { cn } from "@diffkit/ui/lib/utils";
+import { useCallback, useEffect, useRef, useState } from "react";
+
+export function CopyBadge({
+ value,
+ canTruncate,
+ className,
+}: {
+ value: string;
+ canTruncate?: boolean;
+ className?: string;
+}) {
+ const [copied, setCopied] = useState(false);
+  const timeoutRef = useRef<ReturnType<typeof setTimeout>>(undefined);
+
+ useEffect(() => {
+ return () => clearTimeout(timeoutRef.current);
+ }, []);
+
+ const handleClick = useCallback(async () => {
+ try {
+ await navigator.clipboard.writeText(value);
+ setCopied(true);
+ clearTimeout(timeoutRef.current);
+ timeoutRef.current = setTimeout(() => setCopied(false), 1500);
+ } catch {
+ setCopied(false);
+ }
+ }, [value]);
+
+ return (
+
+
+
+
+ Copied!
+
+ );
+}
diff --git a/apps/dashboard/src/components/workflows/graph/build-layout.ts b/apps/dashboard/src/components/workflows/graph/build-layout.ts
new file mode 100644
index 0000000..855ef72
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/graph/build-layout.ts
@@ -0,0 +1,142 @@
+import type { Node } from "@xyflow/react";
+import type {
+ WorkflowDefinition,
+ WorkflowDefinitionJob,
+ WorkflowRunJob,
+} from "#/lib/github.types";
+import { COLUMN_GAP, NODE_WIDTH, ROW_GAP } from "./constants";
+import { buildNameMatcher, getAggregateState } from "./grouping";
+import { estimateNodeHeight } from "./height";
+import type { FlowNode, GraphEdge, JobNodeData, MatrixNodeData } from "./types";
+
+export type DefGraphLayout = {
+ nodes: FlowNode[];
+ edges: GraphEdge[];
+};
+
+export function buildLayoutFromDefinition(
+ jobs: WorkflowRunJob[],
+ definition: WorkflowDefinition,
+ collapsedIds: Set,
+): DefGraphLayout | null {
+  const matchedJobsByKey = new Map<string, WorkflowRunJob[]>();
+  const claimedJobIds = new Set<number>();
+
+ for (const yamlJob of definition.jobs) {
+ const matcher = buildNameMatcher(
+ yamlJob.nameTemplate,
+ yamlJob.key,
+ yamlJob.isMatrix,
+ );
+ const matches: WorkflowRunJob[] = [];
+ for (const job of jobs) {
+ if (claimedJobIds.has(job.id)) continue;
+ if (matcher(job.name)) {
+ matches.push(job);
+ claimedJobIds.add(job.id);
+ }
+ }
+ matchedJobsByKey.set(yamlJob.key, matches);
+ }
+
+ if (claimedJobIds.size === 0) return null;
+
+ type DefNode = {
+ key: string;
+ def: WorkflowDefinitionJob;
+ matched: WorkflowRunJob[];
+ };
+ const defNodes: DefNode[] = [];
+ for (const yamlJob of definition.jobs) {
+ const matched = matchedJobsByKey.get(yamlJob.key) ?? [];
+ if (matched.length > 0) {
+ defNodes.push({ key: yamlJob.key, def: yamlJob, matched });
+ }
+ }
+
+ const keyToDefNode = new Map(defNodes.map((n) => [n.key, n]));
+  const layerByKey = new Map<string, number>();
+  const visiting = new Set<string>();
+ const computeLayer = (key: string): number => {
+ const cached = layerByKey.get(key);
+ if (cached != null) return cached;
+ if (visiting.has(key)) return 0;
+ visiting.add(key);
+ const node = keyToDefNode.get(key);
+ const needs = node?.def.needs ?? [];
+ const validNeeds = needs.filter((n) => keyToDefNode.has(n));
+ const layer =
+ validNeeds.length > 0
+ ? Math.max(...validNeeds.map((n) => computeLayer(n))) + 1
+ : 0;
+ visiting.delete(key);
+ layerByKey.set(key, layer);
+ return layer;
+ };
+ for (const node of defNodes) computeLayer(node.key);
+
+  const layersMap = new Map<number, DefNode[]>();
+ for (const node of defNodes) {
+ const layer = layerByKey.get(node.key) ?? 0;
+ const bucket = layersMap.get(layer) ?? [];
+ bucket.push(node);
+ layersMap.set(layer, bucket);
+ }
+
+ const sortedLayers = [...layersMap.keys()].sort((a, b) => a - b);
+ const flowNodes: FlowNode[] = [];
+ for (const layer of sortedLayers) {
+ const layerNodes = layersMap.get(layer) ?? [];
+ const x = layer * (NODE_WIDTH + COLUMN_GAP);
+ let currentY = 0;
+ for (const node of layerNodes) {
+ const nodeId = `def-${node.key}`;
+ const isMatrixNode = node.matched.length > 1 || node.def.isMatrix;
+ let flowNode: FlowNode | null = null;
+ if (isMatrixNode) {
+ flowNode = {
+ id: nodeId,
+ type: "matrix",
+ position: { x, y: currentY },
+ data: {
+ baseName: node.key,
+ jobs: node.matched,
+ aggregate: getAggregateState(node.matched),
+ collapsed: collapsedIds.has(nodeId),
+ },
+        } satisfies Node<MatrixNodeData>;
+ } else {
+ const job = node.matched[0];
+ if (!job) continue;
+ flowNode = {
+ id: nodeId,
+ type: "job",
+ position: { x, y: currentY },
+ data: {
+ job,
+ collapsed: collapsedIds.has(nodeId),
+ },
+        } satisfies Node<JobNodeData>;
+ }
+ flowNodes.push(flowNode);
+ currentY += estimateNodeHeight(flowNode) + ROW_GAP;
+ }
+ }
+
+ const edges: GraphEdge[] = [];
+ for (const node of defNodes) {
+ for (const need of node.def.needs) {
+ if (!keyToDefNode.has(need)) continue;
+ const source = `def-${need}`;
+ const target = `def-${node.key}`;
+ edges.push({
+ id: `${source}->${target}`,
+ source,
+ target,
+ type: "smoothstep",
+ });
+ }
+ }
+
+ return { nodes: flowNodes, edges };
+}
diff --git a/apps/dashboard/src/components/workflows/graph/constants.ts b/apps/dashboard/src/components/workflows/graph/constants.ts
new file mode 100644
index 0000000..a4c8fc4
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/graph/constants.ts
@@ -0,0 +1,31 @@
+import { cn } from "@diffkit/ui/lib/utils";
+
+export const MATRIX_SUFFIX_RE = /^(.*?)\s*\(([^)]+)\)\s*$/;
+
+export const NODE_WIDTH = 300;
+export const COLUMN_GAP = 90;
+export const ROW_GAP = 20;
+export const VARIANT_POPUP_GAP = 40;
+export const STEP_LOG_WIDTH = 560;
+export const STEP_LOG_HEIGHT = 400;
+export const STEP_LOG_GAP = 60;
+
+export const H_JOB_HEADER = 36;
+export const H_STEP_ROW = 28;
+export const H_STEP_FIRST_LAST_EXTRA = 4;
+export const H_NO_STEPS = 32;
+export const H_BORDER = 1;
+export const H_MATRIX_STATS = 28;
+export const H_MATRIX_OUTER_PAD = 12;
+export const H_MATRIX_CARD_GAP = 6;
+export const H_MATRIX_PILL = 34;
+
+export const NODE_CARD_CLASS = cn(
+ "flex flex-col overflow-hidden rounded-lg border bg-background shadow-sm",
+ "transition-colors hover:border-foreground/20",
+);
+
+export const NODE_HEADER_CLASS =
+ "flex w-full items-center gap-2 px-3 py-2 text-left transition-colors hover:bg-muted/40 disabled:cursor-default disabled:hover:bg-transparent";
+
+export const NODE_HANDLE_CLASS = "!size-1 !border-0 !bg-transparent !opacity-0";
diff --git a/apps/dashboard/src/components/workflows/graph/edges.ts b/apps/dashboard/src/components/workflows/graph/edges.ts
new file mode 100644
index 0000000..7fcd46f
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/graph/edges.ts
@@ -0,0 +1,31 @@
+import type { GraphEdge } from "./types";
+
+export function collectConnectedEdgeIds(
+ nodeId: string,
+ edges: GraphEdge[],
+): Set<string> {
+  const result = new Set<string>();
+ const forward = [nodeId];
+ while (forward.length > 0) {
+ const current = forward.pop();
+ if (!current) break;
+ for (const edge of edges) {
+ if (edge.source === current && !result.has(edge.id)) {
+ result.add(edge.id);
+ forward.push(edge.target);
+ }
+ }
+ }
+ const backward = [nodeId];
+ while (backward.length > 0) {
+ const current = backward.pop();
+ if (!current) break;
+ for (const edge of edges) {
+ if (edge.target === current && !result.has(edge.id)) {
+ result.add(edge.id);
+ backward.push(edge.source);
+ }
+ }
+ }
+ return result;
+}
diff --git a/apps/dashboard/src/components/workflows/graph/format.ts b/apps/dashboard/src/components/workflows/graph/format.ts
new file mode 100644
index 0000000..848b6a8
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/graph/format.ts
@@ -0,0 +1,29 @@
+import type { WorkflowRunJob } from "#/lib/github.types";
+
+export function formatDuration(
+ startedAt: string | null,
+ completedAt: string | null,
+ now?: number,
+): string | null {
+ if (!startedAt) return null;
+ const startMs = new Date(startedAt).getTime();
+ if (Number.isNaN(startMs)) return null;
+ const endMs = completedAt
+ ? new Date(completedAt).getTime()
+ : (now ?? Date.now());
+ if (Number.isNaN(endMs)) return null;
+ const totalSeconds = Math.max(0, Math.floor((endMs - startMs) / 1000));
+ if (totalSeconds < 60) return `${totalSeconds}s`;
+ const minutes = Math.floor(totalSeconds / 60);
+ const seconds = totalSeconds % 60;
+ if (minutes < 60) return `${minutes}m ${seconds}s`;
+ const hours = Math.floor(minutes / 60);
+ return `${hours}h ${minutes % 60}m`;
+}
+
+export function formatJobDuration(
+ job: WorkflowRunJob,
+ now?: number,
+): string | null {
+ return formatDuration(job.startedAt, job.completedAt, now);
+}
diff --git a/apps/dashboard/src/components/workflows/graph/graph-config-context.ts b/apps/dashboard/src/components/workflows/graph/graph-config-context.ts
new file mode 100644
index 0000000..7bb38d5
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/graph/graph-config-context.ts
@@ -0,0 +1,20 @@
+import { createContext, useContext } from "react";
+import type { GitHubQueryScope } from "#/lib/github.query";
+
+export type GraphConfig = {
+ scope: GitHubQueryScope;
+ owner: string;
+ repo: string;
+ runId: number;
+};
+
+const GraphConfigContext = createContext<GraphConfig | null>(null);
+
+export const GraphConfigProvider = GraphConfigContext.Provider;
+
+export function useGraphConfig(): GraphConfig {
+ const ctx = useContext(GraphConfigContext);
+ if (!ctx)
+ throw new Error("useGraphConfig must be used inside GraphConfigProvider");
+ return ctx;
+}
diff --git a/apps/dashboard/src/components/workflows/graph/graph-controls.tsx b/apps/dashboard/src/components/workflows/graph/graph-controls.tsx
new file mode 100644
index 0000000..9fa4d9f
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/graph/graph-controls.tsx
@@ -0,0 +1,65 @@
+import {
+ CenterFocusIcon,
+ FullScreenIcon,
+ MinusSignIcon,
+ PlusSignIcon,
+} from "@diffkit/icons";
+import { cn } from "@diffkit/ui/lib/utils";
+import { ControlButton, Controls, useReactFlow } from "@xyflow/react";
+
+const CONTROL_BUTTON_CLASS = cn(
+ "!flex !size-7 !items-center !justify-center !rounded-md !border-0 !bg-transparent !p-0 !text-secondary-foreground/70",
+ "hover:!bg-secondary-foreground/10 hover:!text-secondary-foreground",
+ "[&_svg]:!h-3.5 [&_svg]:!w-3.5 [&_svg]:!max-h-none [&_svg]:!max-w-none [&_svg]:!fill-none",
+);
+
+export function GraphControls({
+ isFullscreen,
+ onToggleFullscreen,
+}: {
+ isFullscreen: boolean;
+ onToggleFullscreen: () => void;
+}) {
+ const { zoomIn, zoomOut, fitView } = useReactFlow();
+ return (
+
+ zoomIn()}
+ aria-label="Zoom in"
+ className={CONTROL_BUTTON_CLASS}
+ >
+
+
+ zoomOut()}
+ aria-label="Zoom out"
+ className={CONTROL_BUTTON_CLASS}
+ >
+
+
+ fitView({ duration: 200 })}
+ aria-label="Center"
+ className={CONTROL_BUTTON_CLASS}
+ >
+
+
+
+
+
+
+ );
+}
diff --git a/apps/dashboard/src/components/workflows/graph/grouping.ts b/apps/dashboard/src/components/workflows/graph/grouping.ts
new file mode 100644
index 0000000..b78554a
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/graph/grouping.ts
@@ -0,0 +1,85 @@
+import {
+ type CheckState,
+ getCheckState,
+} from "#/components/checks/check-state-icon";
+import type { WorkflowRunJob } from "#/lib/github.types";
+import { MATRIX_SUFFIX_RE } from "./constants";
+import type { JobGroup } from "./types";
+
+export function getGroupId(group: JobGroup): string {
+ if (group.kind === "matrix") return `matrix-${group.baseName}`;
+ return `job-${group.job.id}`;
+}
+
+export function groupJobs(jobs: WorkflowRunJob[]): JobGroup[] {
+  const bucket = new Map<string, WorkflowRunJob[]>();
+ const order: string[] = [];
+
+ for (const job of jobs) {
+ const match = MATRIX_SUFFIX_RE.exec(job.name);
+ const key = match ? match[1] : job.name;
+ if (!bucket.has(key)) {
+ bucket.set(key, []);
+ order.push(key);
+ }
+ bucket.get(key)?.push(job);
+ }
+
+ return order.map((key) => {
+ const group = bucket.get(key) ?? [];
+ if (group.length > 1) {
+ return { kind: "matrix", baseName: key, jobs: group };
+ }
+ const job = group[0];
+ if (!job) return { kind: "matrix", baseName: key, jobs: [] };
+ return { kind: "single", job };
+ });
+}
+
+export function buildColumns(groups: JobGroup[]): JobGroup[][] {
+ const columns: JobGroup[][] = [];
+ let runningSingles: JobGroup[] = [];
+ for (const group of groups) {
+ if (group.kind === "matrix") {
+ if (runningSingles.length > 0) {
+ columns.push(runningSingles);
+ runningSingles = [];
+ }
+ columns.push([group]);
+ } else {
+ runningSingles.push(group);
+ }
+ }
+ if (runningSingles.length > 0) columns.push(runningSingles);
+ return columns;
+}
+
+export function getAggregateState(jobs: WorkflowRunJob[]): CheckState {
+ const states = jobs.map((j) => getCheckState(j));
+ if (states.some((s) => s === "failure")) return "failure";
+ if (states.some((s) => s === "pending" || s === "expected")) return "pending";
+ if (states.some((s) => s === "waiting")) return "waiting";
+ if (states.length > 0 && states.every((s) => s === "skipped")) {
+ return "skipped";
+ }
+ return "success";
+}
+
+export function buildNameMatcher(
+ template: string | null,
+ key: string,
+ isMatrix: boolean,
+): (name: string) => boolean {
+ if (template) {
+ const PLACEHOLDER = "\x00";
+ const withPlaceholder = template.replace(/\$\{\{[^}]*\}\}/g, PLACEHOLDER);
+ const escaped = withPlaceholder.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
+ const pattern = `^${escaped.split(PLACEHOLDER).join(".+?")}$`;
+ const re = new RegExp(pattern);
+ return (name) => re.test(name);
+ }
+ if (isMatrix) {
+ return (name) => name === key || name.startsWith(`${key} (`);
+ }
+ return (name) => name === key;
+}
diff --git a/apps/dashboard/src/components/workflows/graph/height.ts b/apps/dashboard/src/components/workflows/graph/height.ts
new file mode 100644
index 0000000..f342b1a
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/graph/height.ts
@@ -0,0 +1,49 @@
+import type { WorkflowRunJob } from "#/lib/github.types";
+import {
+ H_BORDER,
+ H_JOB_HEADER,
+ H_MATRIX_CARD_GAP,
+ H_MATRIX_OUTER_PAD,
+ H_MATRIX_PILL,
+ H_MATRIX_STATS,
+ H_NO_STEPS,
+ H_STEP_FIRST_LAST_EXTRA,
+ H_STEP_ROW,
+ STEP_LOG_HEIGHT,
+} from "./constants";
+import type { FlowNode } from "./types";
+
+export function estimateJobCardHeight(
+ job: WorkflowRunJob,
+ expanded: boolean,
+): number {
+ if (!expanded) return H_JOB_HEADER;
+ if (job.steps.length === 0) return H_JOB_HEADER + H_BORDER + H_NO_STEPS;
+ return (
+ H_JOB_HEADER +
+ H_BORDER +
+ job.steps.length * H_STEP_ROW +
+ H_STEP_FIRST_LAST_EXTRA
+ );
+}
+
+export function estimateMatrixHeight(
+ jobs: WorkflowRunJob[],
+ expanded: boolean,
+): number {
+ const main = H_JOB_HEADER + (expanded ? H_BORDER + H_MATRIX_STATS : 0);
+ if (!expanded) return H_MATRIX_OUTER_PAD + main;
+ return (
+ H_MATRIX_OUTER_PAD +
+ main +
+ jobs.length * H_MATRIX_PILL +
+ Math.max(0, jobs.length - 1) * H_MATRIX_CARD_GAP
+ );
+}
+
+export function estimateNodeHeight(node: FlowNode): number {
+ if (node.type === "stepLog") return STEP_LOG_HEIGHT;
+ if (node.type === "matrix")
+ return estimateMatrixHeight(node.data.jobs, !node.data.collapsed);
+ return estimateJobCardHeight(node.data.job, !node.data.collapsed);
+}
diff --git a/apps/dashboard/src/components/workflows/graph/hover-context.ts b/apps/dashboard/src/components/workflows/graph/hover-context.ts
new file mode 100644
index 0000000..46af555
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/graph/hover-context.ts
@@ -0,0 +1,9 @@
+import { createContext, useContext } from "react";
+
+const NodeHoverContext = createContext<string | null>(null);
+
+export const NodeHoverProvider = NodeHoverContext.Provider;
+
+export function useIsNodeHovered(nodeId: string): boolean {
+ return useContext(NodeHoverContext) === nodeId;
+}
diff --git a/apps/dashboard/src/components/workflows/graph/job-card.tsx b/apps/dashboard/src/components/workflows/graph/job-card.tsx
new file mode 100644
index 0000000..12a6b1c
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/graph/job-card.tsx
@@ -0,0 +1,144 @@
+import { ChevronDownIcon, ExternalLinkIcon } from "@diffkit/icons";
+import { cn } from "@diffkit/ui/lib/utils";
+import { Link } from "@tanstack/react-router";
+import { useCallback } from "react";
+import {
+ type CheckState,
+ CheckStateIcon,
+ getCheckState,
+} from "#/components/checks/check-state-icon";
+import type { WorkflowRunJob, WorkflowRunStep } from "#/lib/github.types";
+import { NODE_CARD_CLASS, NODE_HEADER_CLASS, NODE_WIDTH } from "./constants";
+import { useGraphConfig } from "./graph-config-context";
+import { JobDuration } from "./job-duration";
+import { useStepLogActions } from "./step-log-context";
+
+export function getJobCardRingClass(state: CheckState): string {
+ if (state === "success") return "ring-4 ring-muted/80 dark:ring-muted/50";
+ if (state === "failure")
+ return "border-transparent hover:border-transparent ring-4 ring-red-500/25";
+ if (state === "pending" || state === "expected")
+ return "border-transparent hover:border-transparent ring-4 ring-amber-500/20";
+ return "";
+}
+
+export function NodeChevron({ open }: { open: boolean }) {
+ return (
+
+ );
+}
+
+function StepRow({
+ step,
+ job,
+ sourceNodeId,
+}: {
+ step: WorkflowRunStep;
+ job: WorkflowRunJob;
+ sourceNodeId: string;
+}) {
+ const state = getCheckState(step);
+ const { open } = useStepLogActions();
+ const onClick = useCallback(() => {
+ open({
+ jobId: job.id,
+ jobStatus: job.status,
+ stepNumber: step.number,
+ stepName: step.name,
+ sourceNodeId,
+ });
+ }, [open, job.id, job.status, step.number, step.name, sourceNodeId]);
+ return (
+
+ );
+}
+
+export function JobCard({
+ job,
+ nodeId,
+ displayName,
+ expanded,
+ onToggle,
+}: {
+ job: WorkflowRunJob;
+ nodeId: string;
+ displayName?: string;
+ expanded: boolean;
+ onToggle?: () => void;
+}) {
+ const state = getCheckState(job);
+ const name = displayName ?? job.name;
+ const { owner, repo, runId } = useGraphConfig();
+ return (
+
+
+
+ e.stopPropagation()}
+ className="absolute top-1.5 right-1.5 rounded-md bg-background/80 p-1 text-muted-foreground opacity-0 shadow-sm transition-opacity hover:text-foreground group-hover/card:opacity-100"
+ >
+
+
+
+ {expanded ? (
+
+ {job.steps.length === 0 ? (
+
No steps
+ ) : (
+ job.steps.map((step) => (
+
+ ))
+ )}
+
+ ) : null}
+
+ );
+}
diff --git a/apps/dashboard/src/components/workflows/graph/job-duration.tsx b/apps/dashboard/src/components/workflows/graph/job-duration.tsx
new file mode 100644
index 0000000..bc74f45
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/graph/job-duration.tsx
@@ -0,0 +1,41 @@
+import type { WorkflowRunJob } from "#/lib/github.types";
+import { useNow } from "#/lib/use-now";
+import { formatJobDuration } from "./format";
+
+export function JobDuration({
+ job,
+ className,
+}: {
+ job: WorkflowRunJob;
+ className?: string;
+}) {
+ const isLive = !!job.startedAt && !job.completedAt;
+ return isLive ? (
+
+ ) : (
+
+ );
+}
+
+function LiveJobDuration({
+ job,
+ className,
+}: {
+ job: WorkflowRunJob;
+ className?: string;
+}) {
+ const now = useNow();
+ const text = formatJobDuration(job, now);
+ return text ? {text} : null;
+}
+
+function StaticJobDuration({
+ job,
+ className,
+}: {
+ job: WorkflowRunJob;
+ className?: string;
+}) {
+ const text = formatJobDuration(job);
+ return text ? {text} : null;
+}
diff --git a/apps/dashboard/src/components/workflows/graph/job-node.tsx b/apps/dashboard/src/components/workflows/graph/job-node.tsx
new file mode 100644
index 0000000..8bde443
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/graph/job-node.tsx
@@ -0,0 +1,33 @@
+import { Handle, type Node, type NodeProps, Position } from "@xyflow/react";
+import { useCallback } from "react";
+import { NODE_HANDLE_CLASS } from "./constants";
+import { JobCard } from "./job-card";
+import { useNodeToggle } from "./toggle-context";
+import type { JobNodeData } from "./types";
+
+export function JobNode({ id, data }: NodeProps<Node<JobNodeData>>) {
+ const toggle = useNodeToggle();
+ const onToggle = useCallback(() => toggle(id), [id, toggle]);
+ const canToggle = data.toggleable !== false;
+ const expanded = !data.collapsed;
+ return (
+ <>
+
+
+
+ >
+ );
+}
diff --git a/apps/dashboard/src/components/workflows/graph/matrix-node.tsx b/apps/dashboard/src/components/workflows/graph/matrix-node.tsx
new file mode 100644
index 0000000..1effed7
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/graph/matrix-node.tsx
@@ -0,0 +1,101 @@
+import { cn } from "@diffkit/ui/lib/utils";
+import { Handle, type Node, type NodeProps, Position } from "@xyflow/react";
+import { useCallback, useMemo } from "react";
+import {
+ CheckStateIcon,
+ getCheckState,
+} from "#/components/checks/check-state-icon";
+import {
+ MATRIX_SUFFIX_RE,
+ NODE_CARD_CLASS,
+ NODE_HANDLE_CLASS,
+ NODE_HEADER_CLASS,
+ NODE_WIDTH,
+} from "./constants";
+import { NodeChevron } from "./job-card";
+import { JobDuration } from "./job-duration";
+import { useNodeToggle } from "./toggle-context";
+import type { MatrixNodeData } from "./types";
+
+export function MatrixNode({
+ id,
+ data,
+}: NodeProps<Node<MatrixNodeData>>) {
+ const toggle = useNodeToggle();
+ const onToggle = useCallback(() => toggle(id), [id, toggle]);
+ const canToggle = data.toggleable !== false;
+ const expanded = !data.collapsed;
+ const completedCount = useMemo(
+ () => data.jobs.filter((j) => j.status === "completed").length,
+ [data.jobs],
+ );
+ return (
+ <>
+
+
+
+
+ {expanded ? (
+
+ Matrix
+
+ {completedCount} / {data.jobs.length} completed
+
+
+ ) : null}
+
+ {expanded
+ ? data.jobs.map((job) => {
+ const match = MATRIX_SUFFIX_RE.exec(job.name);
+ const variant = match ? `(${match[2]})` : job.name;
+ return (
+
+
+
+ {variant}
+
+
+
+ );
+ })
+ : null}
+
+
+ >
+ );
+}
diff --git a/apps/dashboard/src/components/workflows/graph/parse-step-log.ts b/apps/dashboard/src/components/workflows/graph/parse-step-log.ts
new file mode 100644
index 0000000..b6e4af2
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/graph/parse-step-log.ts
@@ -0,0 +1,227 @@
+const TS_PREFIX_RE = /^(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z)\s(.*)$/;
+const GROUP_RE = /^##\[group\](.*)$/;
+const ENDGROUP_RE = /^##\[endgroup\]/;
+
+export type LogLine = {
+ ts: string | null;
+ text: string;
+};
+
+export type LogEntry =
+ | { kind: "line"; ts: string | null; text: string }
+ | {
+ kind: "group";
+ id: string;
+ name: string;
+ ts: string | null;
+ children: LogEntry[];
+ };
+
+function stripTimestamp(line: string): LogLine {
+ const m = line.match(TS_PREFIX_RE);
+ if (!m) return { ts: null, text: line };
+ return { ts: m[1] ?? null, text: m[2] ?? "" };
+}
+
+function normalizeName(name: string): string {
+ return name.trim().toLowerCase().replace(/\s+/g, " ");
+}
+
+function matchesStep(groupName: string, stepName: string): boolean {
+ const g = normalizeName(groupName);
+ const s = normalizeName(stepName);
+ if (!s) return false;
+ if (g === s) return true;
+ if (g === `run ${s}`) return true;
+ return false;
+}
+
+export type StepLogRange = {
+ startedAt?: string | null;
+ completedAt?: string | null;
+};
+
+function extractByGroup(lines: string[], stepName: string): LogEntry[] {
+ const root: LogEntry[] = [];
+ const stack: LogEntry[][] = [root];
+ let capturing = false;
+ let depth = 0;
+ let groupCounter = 0;
+
+ for (const raw of lines) {
+ const parsed = stripTimestamp(raw);
+ const { text, ts } = parsed;
+
+ if (!capturing) {
+ const gm = text.match(GROUP_RE);
+ if (gm && matchesStep(gm[1] ?? "", stepName)) {
+ capturing = true;
+ depth = 1;
+ }
+ continue;
+ }
+
+ const gm = text.match(GROUP_RE);
+ if (gm) {
+ depth++;
+ groupCounter++;
+ const group: LogEntry = {
+ kind: "group",
+ id: `g-${groupCounter}`,
+ name: gm[1] ?? "",
+ ts,
+ children: [],
+ };
+ const parent = stack[stack.length - 1];
+ if (parent) parent.push(group);
+ stack.push(group.children);
+ continue;
+ }
+ if (ENDGROUP_RE.test(text)) {
+ depth--;
+ if (depth <= 0) {
+ capturing = false;
+ continue;
+ }
+ if (stack.length > 1) stack.pop();
+ continue;
+ }
+ const target = stack[stack.length - 1];
+ if (target) target.push({ kind: "line", ts, text });
+ }
+ return root;
+}
+
+function extractByTimeRange(lines: string[], range: StepLogRange): LogEntry[] {
+ const startMs = range.startedAt ? Date.parse(range.startedAt) : null;
+ const endMs = range.completedAt ? Date.parse(range.completedAt) : null;
+ if (startMs == null && endMs == null) return [];
+
+ const root: LogEntry[] = [];
+ const stack: LogEntry[][] = [root];
+ let groupCounter = 0;
+
+ for (const raw of lines) {
+ const parsed = stripTimestamp(raw);
+ if (!parsed.ts) continue;
+ const t = Date.parse(parsed.ts);
+ if (!Number.isFinite(t)) continue;
+ if (startMs != null && t < startMs) continue;
+ if (endMs != null && t > endMs) continue;
+
+ const { text, ts } = parsed;
+ const gm = text.match(GROUP_RE);
+ if (gm) {
+ groupCounter++;
+ const group: LogEntry = {
+ kind: "group",
+ id: `g-${groupCounter}`,
+ name: gm[1] ?? "",
+ ts,
+ children: [],
+ };
+ const parent = stack[stack.length - 1];
+ if (parent) parent.push(group);
+ stack.push(group.children);
+ continue;
+ }
+ if (ENDGROUP_RE.test(text)) {
+ if (stack.length > 1) stack.pop();
+ continue;
+ }
+ const target = stack[stack.length - 1];
+ if (target) target.push({ kind: "line", ts, text });
+ }
+ return root;
+}
+
+export type ExtractStrategy = "group" | "time-range" | "empty";
+
+export type ExtractResult = {
+ entries: LogEntry[];
+ strategy: ExtractStrategy;
+};
+
+export function splitLogLines(fullLog: string): string[] {
+ return fullLog.split(/\r?\n/);
+}
+
+export function extractStepLog(
+ source: string | string[],
+ stepName: string,
+ range?: StepLogRange,
+): ExtractResult {
+ const lines =
+ typeof source === "string"
+ ? source.length === 0
+ ? null
+ : splitLogLines(source)
+ : source.length === 0
+ ? null
+ : source;
+ if (!lines) return { entries: [], strategy: "empty" };
+ const byGroup = extractByGroup(lines, stepName);
+ if (byGroup.length > 0) return { entries: byGroup, strategy: "group" };
+ if (range) {
+ const byTime = extractByTimeRange(lines, range);
+ if (byTime.length > 0) return { entries: byTime, strategy: "time-range" };
+ }
+ return { entries: [], strategy: "empty" };
+}
+
+/** Build nested LogEntry[] from a pre-extracted step's log text.
+ * Used when the source already contains exactly one step's content (e.g. the
+ * per-step `.txt` file from the run-level zip), so no name/time-range matching
+ * is needed — we only collapse `##[group]…##[endgroup]` into nested entries. */
+export function parseStepLogContent(text: string): LogEntry[] {
+ if (!text) return [];
+ const root: LogEntry[] = [];
+ const stack: LogEntry[][] = [root];
+ let groupCounter = 0;
+ for (const raw of splitLogLines(text)) {
+ const { ts, text: line } = stripTimestamp(raw);
+ const gm = line.match(GROUP_RE);
+ if (gm) {
+ groupCounter++;
+ const group: LogEntry = {
+ kind: "group",
+ id: `g-${groupCounter}`,
+ name: gm[1] ?? "",
+ ts,
+ children: [],
+ };
+ const parent = stack[stack.length - 1];
+ if (parent) parent.push(group);
+ stack.push(group.children);
+ continue;
+ }
+ if (ENDGROUP_RE.test(line)) {
+ if (stack.length > 1) stack.pop();
+ continue;
+ }
+ const target = stack[stack.length - 1];
+ if (target) target.push({ kind: "line", ts, text: line });
+ }
+ return root;
+}
+
+export function collectGroupHeaders(fullLog: string, limit = 40): string[] {
+ const out: string[] = [];
+ for (const raw of fullLog.split(/\r?\n/)) {
+ const { text } = stripTimestamp(raw);
+ if (text.startsWith("##[group]")) {
+ out.push(text.slice("##[group]".length));
+ if (out.length >= limit) break;
+ }
+ }
+ return out;
+}
+
+export function countEntryLines(entries: LogEntry[]): number {
+ let n = 0;
+ for (const e of entries) {
+ if (e.kind === "line") n++;
+ else n += 1 + countEntryLines(e.children);
+ }
+ return n;
+}
diff --git a/apps/dashboard/src/components/workflows/graph/step-log-content.tsx b/apps/dashboard/src/components/workflows/graph/step-log-content.tsx
new file mode 100644
index 0000000..c7bb4dc
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/graph/step-log-content.tsx
@@ -0,0 +1,345 @@
+import { ChevronDownIcon, ChevronRightIcon } from "@diffkit/icons";
+import { Spinner } from "@diffkit/ui/components/spinner";
+import { cn } from "@diffkit/ui/lib/utils";
+import { memo, useCallback, useEffect, useMemo, useRef, useState } from "react";
+import { countEntryLines, type LogEntry } from "./parse-step-log";
+
+function collectGroupIds(entries: LogEntry[], out: Set): void {
+ for (const entry of entries) {
+ if (entry.kind === "group") {
+ out.add(entry.id);
+ collectGroupIds(entry.children, out);
+ }
+ }
+}
+
+export function StepLogContent({
+ entries,
+ totalLineCount,
+ isLoading,
+ isError = false,
+ notAvailable,
+ hasLogs,
+ isStepLive,
+ scrollable = true,
+}: {
+ entries: LogEntry[];
+ totalLineCount: number;
+ isLoading: boolean;
+ isError?: boolean;
+ notAvailable: boolean;
+ hasLogs: boolean;
+ isStepLive: boolean;
+ scrollable?: boolean;
+}) {
+ const scrollRef = useRef(null);
+ const [collapsed, setCollapsed] = useState>(() => {
+ const ids = new Set();
+ collectGroupIds(entries, ids);
+ return ids;
+ });
+ const seenGroupIdsRef = useRef>(collapsed);
+
+ useEffect(() => {
+ const ids = new Set();
+ collectGroupIds(entries, ids);
+ const newIds: string[] = [];
+ for (const id of ids) {
+ if (!seenGroupIdsRef.current.has(id)) {
+ newIds.push(id);
+ seenGroupIdsRef.current.add(id);
+ }
+ }
+ if (newIds.length === 0) return;
+ setCollapsed((prev) => {
+ const next = new Set(prev);
+ for (const id of newIds) next.add(id);
+ return next;
+ });
+ }, [entries]);
+
+ const toggleGroup = useCallback((id: string) => {
+ setCollapsed((prev) => {
+ const next = new Set(prev);
+ if (next.has(id)) next.delete(id);
+ else next.add(id);
+ return next;
+ });
+ }, []);
+
+ // biome-ignore lint/correctness/useExhaustiveDependencies: intentionally re-scrolls when line count changes
+ useEffect(() => {
+ if (!isStepLive || !scrollable) return;
+ const el = scrollRef.current;
+ if (!el) return;
+ el.scrollTop = el.scrollHeight;
+ }, [isStepLive, scrollable, totalLineCount]);
+
+ const lineNoWidth = useMemo(
+ () => `${Math.max(2, String(totalLineCount).length)}ch`,
+ [totalLineCount],
+ );
+
+ const statusClass = scrollable
+ ? "flex flex-1 items-center justify-center text-muted-foreground text-xs"
+ : "flex items-center justify-center px-4 py-8 text-muted-foreground text-xs";
+
+ if (isLoading && !hasLogs) {
+ return (
+
+
+ Loading logs…
+
+ );
+ }
+
+ if (isError && !hasLogs) {
+ return (
+
+ Failed to load logs. Try refreshing.
+
+ );
+ }
+
+ if (notAvailable) {
+ return (
+
+ Logs are not available yet. They become available once the job starts or
+ after completion.
+
+ );
+ }
+
+ if (!hasLogs) {
+ return (
+
+ No log output for this step yet.
+
+ );
+ }
+
+ const counter = { value: 0 };
+ return (
+
+
+
+ );
+}
+
+type Counter = { value: number };
+
+function EntryList({
+ entries,
+ depth,
+ counter,
+ collapsed,
+ onToggle,
+ lineNoWidth,
+}: {
+ entries: LogEntry[];
+ depth: number;
+ counter: Counter;
+ collapsed: Set;
+ onToggle: (id: string) => void;
+ lineNoWidth: string;
+}) {
+ return (
+ <>
+ {entries.map((entry, idx) => {
+ if (entry.kind === "line") {
+ counter.value += 1;
+ return (
+
+ );
+ }
+ counter.value += 1;
+ const headerLineNumber = counter.value;
+ const isOpen = !collapsed.has(entry.id);
+ const header = (
+ onToggle(entry.id)}
+ lineNoWidth={lineNoWidth}
+ />
+ );
+ if (!isOpen) {
+ counter.value += countEntryLines(entry.children);
+ return header;
+ }
+ return (
+
+ {header}
+
+
+ );
+ })}
+ >
+ );
+}
+
+type LogLevel = "error" | "warning" | "notice" | "debug" | null;
+
+type ParsedLogLine = {
+ level: LogLevel;
+ body: string;
+};
+
+const LEVEL_BRACKET_RE = /^##\[(error|warning|notice|debug)\](.*)$/;
+const LEVEL_WF_CMD_RE = /^::(error|warning|notice|debug)(?:\s[^:]*)?::(.*)$/;
+
+function parseLogLine(text: string): ParsedLogLine {
+ const bm = text.match(LEVEL_BRACKET_RE);
+ if (bm) {
+ return { level: bm[1] as LogLevel, body: bm[2] ?? "" };
+ }
+ const wm = text.match(LEVEL_WF_CMD_RE);
+ if (wm) {
+ return { level: wm[1] as LogLevel, body: wm[2] ?? "" };
+ }
+ return { level: null, body: text };
+}
+
+const LEVEL_LABELS: Record, string> = {
+ error: "Error:",
+ warning: "Warning:",
+ notice: "Notice:",
+ debug: "Debug:",
+};
+
+const LEVEL_ROW_BG: Record, string> = {
+ error: "bg-red-500/10",
+ warning: "bg-amber-500/10",
+ notice: "bg-blue-500/10",
+ debug: "bg-muted/40",
+};
+
+const LEVEL_LINE_NO: Record, string> = {
+ error: "text-red-500",
+ warning: "text-amber-500",
+ notice: "text-blue-500",
+ debug: "text-muted-foreground",
+};
+
+const LEVEL_LABEL_TEXT: Record, string> = {
+ error: "text-red-500 dark:text-red-400",
+ warning: "text-amber-600 dark:text-amber-400",
+ notice: "text-blue-600 dark:text-blue-400",
+ debug: "text-muted-foreground",
+};
+
+const LogRow = memo(function LogRow({
+ text,
+ lineNumber,
+ depth,
+ lineNoWidth,
+}: {
+ text: string;
+ lineNumber: number;
+ depth: number;
+ lineNoWidth: string;
+}) {
+ const { level, body } = parseLogLine(text);
+ const levelClass = level ? LEVEL_ROW_BG[level] : "";
+ const lineNoClass = level ? LEVEL_LINE_NO[level] : "text-muted-foreground/50";
+ return (
+
+
+ {lineNumber}
+
+ 0 ? { paddingLeft: `${depth}ch` } : undefined}
+ >
+ {level ? (
+ <>
+
+ {LEVEL_LABELS[level]}
+
+ {body ? ` ${body}` : ""}
+ >
+ ) : (
+ body
+ )}
+
+
+ );
+});
+
+function GroupHeaderRow({
+ name,
+ lineNumber,
+ depth,
+ isOpen,
+ onToggle,
+ lineNoWidth,
+}: {
+ name: string;
+ lineNumber: number;
+ depth: number;
+ isOpen: boolean;
+ onToggle: () => void;
+ lineNoWidth: string;
+}) {
+ return (
+
+
+ {lineNumber}
+
+
+
+ );
+}
diff --git a/apps/dashboard/src/components/workflows/graph/step-log-context.ts b/apps/dashboard/src/components/workflows/graph/step-log-context.ts
new file mode 100644
index 0000000..977da7d
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/graph/step-log-context.ts
@@ -0,0 +1,31 @@
+import { createContext, useContext } from "react";
+
+export type OpenStepLogInput = {
+ jobId: number;
+ jobStatus: string;
+ stepNumber: number;
+ stepName: string;
+ sourceNodeId: string;
+};
+
+export type StepLogActions = {
+ open: (input: OpenStepLogInput) => void;
+ close: (id: string) => void;
+};
+
+export function getStepLogNodeId(jobId: number, stepNumber: number): string {
+ return `step-log-${jobId}-${stepNumber}`;
+}
+
+const noop: StepLogActions = {
+ open: () => {},
+ close: () => {},
+};
+
+const StepLogContext = createContext(noop);
+
+export const StepLogProvider = StepLogContext.Provider;
+
+export function useStepLogActions(): StepLogActions {
+ return useContext(StepLogContext);
+}
diff --git a/apps/dashboard/src/components/workflows/graph/step-log-node.tsx b/apps/dashboard/src/components/workflows/graph/step-log-node.tsx
new file mode 100644
index 0000000..6de0efc
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/graph/step-log-node.tsx
@@ -0,0 +1,198 @@
+import { ExternalLinkIcon, RefreshCwIcon, XIcon } from "@diffkit/icons";
+import { Spinner } from "@diffkit/ui/components/spinner";
+import { useQuery, useQueryClient } from "@tanstack/react-query";
+import { Link } from "@tanstack/react-router";
+import {
+ Handle,
+ type Node,
+ type NodeProps,
+ NodeResizeControl,
+ Position,
+} from "@xyflow/react";
+import { useMemo, useState } from "react";
+import {
+ CheckStateIcon,
+ getCheckState,
+} from "#/components/checks/check-state-icon";
+import {
+ githubQueryKeys,
+ githubWorkflowJobLogsQueryOptions,
+} from "#/lib/github.query";
+import { getStepHashId } from "../step-hash";
+import {
+ NODE_HANDLE_CLASS,
+ STEP_LOG_HEIGHT,
+ STEP_LOG_WIDTH,
+} from "./constants";
+import { useGraphConfig } from "./graph-config-context";
+import { useIsNodeHovered } from "./hover-context";
+import {
+ countEntryLines,
+ extractStepLog,
+ type LogEntry,
+} from "./parse-step-log";
+import { StepLogContent } from "./step-log-content";
+import { getStepLogNodeId, useStepLogActions } from "./step-log-context";
+import type { StepLogNodeData } from "./types";
+
+export function StepLogNode({
+ data,
+}: NodeProps>) {
+ const { scope, owner, repo, runId } = useGraphConfig();
+ const { close } = useStepLogActions();
+ const queryClient = useQueryClient();
+ const isJobLive = data.jobStatus !== "completed";
+ const isStepLive = data.stepStatus !== "completed";
+
+ const logsQuery = useQuery({
+ ...githubWorkflowJobLogsQueryOptions(scope, {
+ owner,
+ repo,
+ jobId: data.jobId,
+ }),
+ refetchInterval: isJobLive ? 4000 : false,
+ });
+
+ const entries = useMemo(() => {
+ const raw = logsQuery.data?.logs;
+ if (!raw) return [];
+ const parsed = extractStepLog(raw, data.stepName, {
+ startedAt: data.stepStartedAt,
+ completedAt: data.stepCompletedAt,
+ });
+ return parsed.entries;
+ }, [logsQuery.data, data.stepName, data.stepStartedAt, data.stepCompletedAt]);
+
+ const totalLineCount = useMemo(() => countEntryLines(entries), [entries]);
+ const state = getCheckState({
+ status: data.stepStatus,
+ conclusion: data.stepConclusion,
+ });
+
+ const nodeId = getStepLogNodeId(data.jobId, data.stepNumber);
+ const notAvailable = logsQuery.data?.notAvailable === true;
+ const hasLogs = entries.length > 0;
+ const isHovered = useIsNodeHovered(nodeId);
+
+ const handleRefresh = () => {
+ void queryClient.invalidateQueries({
+ queryKey: githubQueryKeys.actions.workflowJobLogs(scope, {
+ owner,
+ repo,
+ jobId: data.jobId,
+ }),
+ });
+ };
+
+ const [size, setSize] = useState({
+ width: STEP_LOG_WIDTH,
+ height: STEP_LOG_HEIGHT,
+ });
+
+ return (
+ <>
+
+
+
+ setSize({ width: params.width, height: params.height })
+ }
+ style={{
+ background: "transparent",
+ border: "none",
+ width: 16,
+ height: 16,
+ opacity: isHovered ? 1 : 0,
+ transition: "opacity 150ms",
+ }}
+ >
+
+
+
+
+
+ {data.stepName}
+
+ {isStepLive ? (
+
+ Live
+
+ ) : null}
+
+
+
+
+
+
+
+
+
+ >
+ );
+}
diff --git a/apps/dashboard/src/components/workflows/graph/toggle-context.ts b/apps/dashboard/src/components/workflows/graph/toggle-context.ts
new file mode 100644
index 0000000..e587783
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/graph/toggle-context.ts
@@ -0,0 +1,9 @@
+import { createContext, useContext } from "react";
+
+const NodeToggleContext = createContext<(id: string) => void>(() => {});
+
+export const NodeToggleProvider = NodeToggleContext.Provider;
+
+export function useNodeToggle(): (id: string) => void {
+ return useContext(NodeToggleContext);
+}
diff --git a/apps/dashboard/src/components/workflows/graph/types.ts b/apps/dashboard/src/components/workflows/graph/types.ts
new file mode 100644
index 0000000..8624710
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/graph/types.ts
@@ -0,0 +1,44 @@
+import type { Node } from "@xyflow/react";
+import type { CheckState } from "#/components/checks/check-state-icon";
+import type { WorkflowRunJob } from "#/lib/github.types";
+
+export type JobGroup =
+ | { kind: "single"; job: WorkflowRunJob }
+ | { kind: "matrix"; baseName: string; jobs: WorkflowRunJob[] };
+
+export type JobNodeData = {
+ job: WorkflowRunJob;
+ collapsed?: boolean;
+ toggleable?: boolean;
+};
+
+export type MatrixNodeData = {
+ baseName: string;
+ jobs: WorkflowRunJob[];
+ aggregate: CheckState;
+ collapsed?: boolean;
+ toggleable?: boolean;
+};
+
+export type StepLogNodeData = {
+ jobId: number;
+ jobStatus: string;
+ stepNumber: number;
+ stepName: string;
+ stepStatus: string;
+ stepConclusion: string | null;
+ stepStartedAt: string | null;
+ stepCompletedAt: string | null;
+};
+
+export type FlowNode =
+ | Node
+ | Node
+ | Node;
+
+export type GraphEdge = {
+ id: string;
+ source: string;
+ target: string;
+ type: string;
+};
diff --git a/apps/dashboard/src/components/workflows/step-hash.ts b/apps/dashboard/src/components/workflows/step-hash.ts
new file mode 100644
index 0000000..4d0edc8
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/step-hash.ts
@@ -0,0 +1,13 @@
+export function getStepHashId(name: string, fallbackNumber: number): string {
+ const slug = slugify(name);
+ return slug || `step-${fallbackNumber}`;
+}
+
+function slugify(value: string): string {
+ return value
+ .toLowerCase()
+ .normalize("NFKD")
+ .replace(/[\u0300-\u036f]/g, "")
+ .replace(/[^a-z0-9]+/g, "-")
+ .replace(/^-+|-+$/g, "");
+}
diff --git a/apps/dashboard/src/components/workflows/workflow-job-page.tsx b/apps/dashboard/src/components/workflows/workflow-job-page.tsx
new file mode 100644
index 0000000..c86142d
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/workflow-job-page.tsx
@@ -0,0 +1,617 @@
+import {
+ ChevronDownIcon,
+ ChevronRightIcon,
+ ExternalLinkIcon,
+ RefreshCwIcon,
+} from "@diffkit/icons";
+import { Skeleton } from "@diffkit/ui/components/skeleton";
+import { Spinner } from "@diffkit/ui/components/spinner";
+import { useQuery, useQueryClient } from "@tanstack/react-query";
+import { getRouteApi, Link } from "@tanstack/react-router";
+import { memo, useCallback, useEffect, useMemo, useRef, useState } from "react";
+import {
+ CheckStateIcon,
+ getCheckState,
+ getCheckStateColor,
+} from "#/components/checks/check-state-icon";
+import {
+ DetailPageLayout,
+ DetailPageSkeletonLayout,
+ StaggerItem,
+} from "#/components/details/detail-page";
+import {
+ githubQueryKeys,
+ githubViewerQueryOptions,
+ githubWorkflowJobLogsQueryOptions,
+ githubWorkflowRunJobsQueryOptions,
+ githubWorkflowRunLogsBundleQueryOptions,
+ githubWorkflowRunQueryOptions,
+ workflowZipJobName,
+} from "#/lib/github.query";
+import type { WorkflowRunJob, WorkflowRunStep } from "#/lib/github.types";
+import { githubRevalidationSignalKeys } from "#/lib/github-revalidation";
+import { useGitHubSignalStream } from "#/lib/use-github-signal-stream";
+import { useHasMounted } from "#/lib/use-has-mounted";
+import { useNow } from "#/lib/use-now";
+import { useRegisterTab } from "#/lib/use-register-tab";
+import { formatDuration } from "./graph/format";
+import {
+ countEntryLines,
+ extractStepLog,
+ type LogEntry,
+ parseStepLogContent,
+ splitLogLines,
+} from "./graph/parse-step-log";
+import { StepLogContent } from "./graph/step-log-content";
+import { getStepHashId } from "./step-hash";
+import { WorkflowRunHeader } from "./workflow-run-header";
+import { WorkflowRunSidebar } from "./workflow-run-sidebar";
+
+const routeApi = getRouteApi(
+ "/_protected/$owner/$repo/actions/runs/$runId_/job/$jobId",
+);
+
+export function WorkflowJobPage() {
+ const { user } = routeApi.useRouteContext();
+ const { owner, repo, runId, jobId } = routeApi.useParams();
+
+ const scope = useMemo(() => ({ userId: user.id }), [user.id]);
+ const runIdNum = Number(runId);
+ const jobIdNum = Number(jobId);
+ const hasMounted = useHasMounted();
+
+ useQuery({
+ ...githubViewerQueryOptions(scope),
+ enabled: hasMounted,
+ });
+ const runQuery = useQuery({
+ ...githubWorkflowRunQueryOptions(scope, {
+ owner,
+ repo,
+ runId: runIdNum,
+ }),
+ enabled: hasMounted,
+ });
+ const jobsQuery = useQuery({
+ ...githubWorkflowRunJobsQueryOptions(scope, {
+ owner,
+ repo,
+ runId: runIdNum,
+ }),
+ enabled: hasMounted,
+ });
+
+ const job = useMemo(
+ () => jobsQuery.data?.find((j) => j.id === jobIdNum) ?? null,
+ [jobsQuery.data, jobIdNum],
+ );
+ const isJobLive = job ? job.status !== "completed" : true;
+ const isRunCompleted = runQuery.data?.status === "completed";
+
+ const webhookTargets = useMemo(() => {
+ const runInput = { owner, repo, runId: runIdNum };
+ const runSignal = githubRevalidationSignalKeys.workflowRunEntity(runInput);
+ const jobSignal = githubRevalidationSignalKeys.workflowJobEntity({
+ owner,
+ repo,
+ jobId: jobIdNum,
+ });
+ return [
+ {
+ queryKey: githubQueryKeys.actions.workflowRun(scope, runInput),
+ signalKeys: [runSignal],
+ },
+ {
+ queryKey: githubQueryKeys.actions.workflowRunJobs(scope, runInput),
+ signalKeys: [runSignal, jobSignal],
+ },
+ {
+ queryKey: githubQueryKeys.actions.workflowJobLogs(scope, {
+ owner,
+ repo,
+ jobId: jobIdNum,
+ }),
+ signalKeys: [runSignal, jobSignal],
+ },
+ {
+ queryKey: githubQueryKeys.actions.workflowRunLogsBundle(scope, {
+ ...runInput,
+ attempt: runQuery.data?.runAttempt,
+ }),
+ signalKeys: [runSignal],
+ },
+ ];
+ }, [scope, owner, repo, runIdNum, jobIdNum, runQuery.data?.runAttempt]);
+ useGitHubSignalStream(webhookTargets);
+
+ const logsQuery = useQuery({
+ ...githubWorkflowJobLogsQueryOptions(scope, {
+ owner,
+ repo,
+ jobId: jobIdNum,
+ }),
+ enabled: hasMounted && !isRunCompleted,
+ refetchInterval: isJobLive ? 4000 : false,
+ });
+
+ const bundleQuery = useQuery({
+ ...githubWorkflowRunLogsBundleQueryOptions(scope, {
+ owner,
+ repo,
+ runId: runIdNum,
+ attempt: runQuery.data?.runAttempt,
+ }),
+ enabled: hasMounted && isRunCompleted,
+ });
+
+ const bundleStepLogs = useMemo | null>(() => {
+ if (!job || !bundleQuery.data || bundleQuery.data.notAvailable) return null;
+ const key = workflowZipJobName(job.name);
+ return bundleQuery.data.jobs[key]?.steps ?? null;
+ }, [job, bundleQuery.data]);
+
+ useRegisterTab(
+ runQuery.data && job
+ ? {
+ type: "actions",
+ title: job.name,
+ number: runQuery.data.runNumber,
+ url: `/${owner}/${repo}/actions/runs/${runIdNum}/job/${jobIdNum}`,
+ repo: `${owner}/${repo}`,
+ iconColor: getCheckStateColor(getCheckState(job)),
+ tabId: `actions:${owner}/${repo}/run/${runIdNum}/job/${jobIdNum}`,
+ }
+ : null,
+ );
+
+ if (runQuery.error) throw runQuery.error;
+ const run = runQuery.data;
+ if (!run) return ;
+
+ const jobs = jobsQuery.data ?? [];
+ const pullRequestNumber = run.pullRequests[0]?.number ?? null;
+
+ return (
+
+
+ {
+ if (isRunCompleted) {
+ void bundleQuery.refetch();
+ } else {
+ void logsQuery.refetch();
+ }
+ }}
+ isRunCompleted={isRunCompleted}
+ owner={owner}
+ repo={repo}
+ runId={runIdNum}
+ jobId={jobIdNum}
+ scope={scope}
+ />
+ >
+ }
+ sidebar={
+
+ }
+ />
+ );
+}
+
+function WorkflowJobPageSkeleton() {
+ return (
+
+
+
+
+
+
+
+
+
+
+ );
+}
+
+function JobContainer({
+ job,
+ isJobLoading,
+ rawLogs,
+ bundleStepLogs,
+ notAvailable,
+ isLogsLoading,
+ isLogsError,
+ isLogsFetching,
+ onRefresh,
+ isRunCompleted,
+ owner,
+ repo,
+ runId,
+ jobId,
+ scope,
+}: {
+ job: WorkflowRunJob | null;
+ isJobLoading: boolean;
+ rawLogs: string | null;
+ bundleStepLogs: Record | null;
+ notAvailable: boolean;
+ isLogsLoading: boolean;
+ isLogsError: boolean;
+ isLogsFetching: boolean;
+ onRefresh: () => void;
+ isRunCompleted: boolean;
+ owner: string;
+ repo: string;
+ runId: number;
+ jobId: number;
+ scope: { userId: string };
+}) {
+ const queryClient = useQueryClient();
+ const handleInvalidateAll = useCallback(() => {
+ if (isRunCompleted) {
+ void queryClient.invalidateQueries({
+ queryKey: githubQueryKeys.actions.workflowRunLogsBundle(scope, {
+ owner,
+ repo,
+ runId,
+ }),
+ });
+ } else {
+ void queryClient.invalidateQueries({
+ queryKey: githubQueryKeys.actions.workflowJobLogs(scope, {
+ owner,
+ repo,
+ jobId,
+ }),
+ });
+ }
+ onRefresh();
+ }, [
+ queryClient,
+ scope,
+ owner,
+ repo,
+ runId,
+ jobId,
+ onRefresh,
+ isRunCompleted,
+ ]);
+
+ const logLines = useMemo(
+ () => (rawLogs ? splitLogLines(rawLogs) : null),
+ [rawLogs],
+ );
+
+ if (!job) {
+ return (
+
+
+ {isJobLoading ? (
+ <>
+
+ Loading job…
+ >
+ ) : (
+ "Job not found."
+ )}
+
+
+ );
+ }
+
+ return (
+
+
+
+ {job.steps.length === 0 ? (
+
+ No steps to display.
+
+ ) : (
+ job.steps.map((step) => (
+
+ ))
+ )}
+
+
+
+ );
+}
+
+function JobHeader({
+ job,
+ isLogsFetching,
+ onRefresh,
+}: {
+ job: WorkflowRunJob;
+ isLogsFetching: boolean;
+ onRefresh: () => void;
+}) {
+ return (
+
+
+ {job.name}
+
+
+
+
+ {job.htmlUrl ? (
+
+
+
+ ) : null}
+
+
+ );
+}
+
+function JobHeaderTimingLabel({
+ startedAt,
+ completedAt,
+}: {
+ startedAt: string | null;
+ completedAt: string | null;
+}) {
+ if (!startedAt) {
+ return Queued;
+ }
+ if (completedAt) {
+ const text = formatDuration(startedAt, completedAt);
+ return (
+ Ran for {text}
+ );
+ }
+ return ;
+}
+
+function LiveJobHeaderTimingLabel({ startedAt }: { startedAt: string }) {
+ const now = useNow();
+ const text = formatDuration(startedAt, null, now);
+ return (
+ Started {text} ago
+ );
+}
+
+function JobFooter({
+ owner,
+ repo,
+ runId,
+}: {
+ owner: string;
+ repo: string;
+ runId: number;
+}) {
+ return (
+
+ Part of workflow run{" "}
+
+ #{runId}
+
+
+ );
+}
+
+const JobStepRow = memo(function JobStepRow({
+ step,
+ stepLogText,
+ logLines,
+ notAvailable,
+ isLogsLoading,
+ isLogsError,
+}: {
+ step: WorkflowRunStep;
+ /** Authoritative per-step content from the run-level zip, when available. */
+ stepLogText: string | null;
+ /** Per-job text fallback (used when the bundle isn't available — typically in-progress runs). */
+ logLines: string[] | null;
+ notAvailable: boolean;
+ isLogsLoading: boolean;
+ isLogsError: boolean;
+}) {
+ const hashId = getStepHashId(step.name, step.number);
+ const rowRef = useRef(null);
+ const [expanded, setExpanded] = useState(() => {
+ if (typeof window === "undefined") return false;
+ return window.location.hash.slice(1) === hashId;
+ });
+
+ useEffect(() => {
+ const syncFromHash = () => {
+ if (window.location.hash.slice(1) !== hashId) return;
+ setExpanded(true);
+ rowRef.current?.scrollIntoView({ block: "start" });
+ };
+ syncFromHash();
+ window.addEventListener("hashchange", syncFromHash);
+ return () => window.removeEventListener("hashchange", syncFromHash);
+ }, [hashId]);
+
+ const handleToggle = useCallback(() => {
+ setExpanded((prev) => {
+ const next = !prev;
+ if (typeof window === "undefined") return next;
+ const current = window.location.hash.slice(1);
+ if (next) {
+ if (current !== hashId) {
+ history.replaceState(null, "", `#${hashId}`);
+ }
+ } else if (current === hashId) {
+ history.replaceState(
+ null,
+ "",
+ window.location.pathname + window.location.search,
+ );
+ }
+ return next;
+ });
+ }, [hashId]);
+
+ const entries = useMemo(() => {
+ if (stepLogText != null) return parseStepLogContent(stepLogText);
+ if (!logLines) return [];
+ return extractStepLog(logLines, step.name, {
+ startedAt: step.startedAt,
+ completedAt: step.completedAt,
+ }).entries;
+ }, [stepLogText, logLines, step.name, step.startedAt, step.completedAt]);
+
+ const totalLineCount = useMemo(() => countEntryLines(entries), [entries]);
+ const state = getCheckState({
+ status: step.status,
+ conclusion: step.conclusion,
+ });
+ const isStepLive = step.status !== "completed";
+ const hasLogs = entries.length > 0;
+
+ return (
+
+
+ {expanded ? (
+
+
+
+ ) : null}
+
+ );
+});
+
+function StepDuration({
+ startedAt,
+ completedAt,
+}: {
+ startedAt: string | null;
+ completedAt: string | null;
+}) {
+ if (!startedAt) return null;
+ if (completedAt) {
+ const text = formatDuration(startedAt, completedAt);
+ return text ? (
+
+ {text}
+
+ ) : null;
+ }
+ return ;
+}
+
+function LiveStepDuration({ startedAt }: { startedAt: string }) {
+ const now = useNow();
+ const text = formatDuration(startedAt, null, now);
+ return text ? (
+
+ {text}
+
+ ) : null;
+}
diff --git a/apps/dashboard/src/components/workflows/workflow-run-artifacts.tsx b/apps/dashboard/src/components/workflows/workflow-run-artifacts.tsx
new file mode 100644
index 0000000..2bdfc94
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/workflow-run-artifacts.tsx
@@ -0,0 +1,156 @@
+import { DownloadIcon, PackageIcon } from "@diffkit/icons";
+import {
+ Table,
+ TableBody,
+ TableCell,
+ TableHead,
+ TableHeader,
+ TableRow,
+} from "@diffkit/ui/components/table";
+import {
+ Tooltip,
+ TooltipContent,
+ TooltipTrigger,
+} from "@diffkit/ui/components/tooltip";
+import { useCallback, useEffect, useRef, useState } from "react";
+import type { WorkflowRunArtifact } from "#/lib/github.types";
+
+export function WorkflowRunArtifacts({
+ artifacts,
+}: {
+ artifacts: WorkflowRunArtifact[];
+}) {
+ if (artifacts.length === 0) return null;
+
+ return (
+
+
+
Artifacts
+
+ {artifacts.length}
+
+
+
+
+
+
+ Name
+
+
+ Size
+
+
+ Digest
+
+
+
+
+
+ {artifacts.map((artifact) => (
+
+ ))}
+
+
+
+ );
+}
+
+function ArtifactRow({ artifact }: { artifact: WorkflowRunArtifact }) {
+ const isDownloadable = !artifact.expired;
+ return (
+
+
+
+
+ {isDownloadable ? (
+
+ {artifact.name}
+
+ ) : (
+
+ {artifact.name}
+
+ )}
+ {artifact.expired ? (
+
+ Expired
+
+ ) : null}
+
+
+
+ {formatSize(artifact.sizeInBytes)}
+
+
+ {artifact.digest ? (
+
+ ) : (
+ —
+ )}
+
+
+ {isDownloadable ? (
+
+
+
+ ) : null}
+
+
+ );
+}
+
+function DigestCell({ digest }: { digest: string }) {
+ const [copied, setCopied] = useState(false);
+ const timeoutRef = useRef>(undefined);
+
+ const handleCopy = useCallback(async () => {
+ try {
+ await navigator.clipboard.writeText(digest);
+ setCopied(true);
+ clearTimeout(timeoutRef.current);
+ timeoutRef.current = setTimeout(() => setCopied(false), 1500);
+ } catch {
+ setCopied(false);
+ }
+ }, [digest]);
+
+ useEffect(() => () => clearTimeout(timeoutRef.current), []);
+
+ return (
+
+
+
+
+ Copied!
+
+ );
+}
+
+function formatSize(bytes: number): string {
+ if (bytes < 1024) return `${bytes} B`;
+ if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;
+ if (bytes < 1024 * 1024 * 1024)
+ return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
+ return `${(bytes / (1024 * 1024 * 1024)).toFixed(2)} GB`;
+}
diff --git a/apps/dashboard/src/components/workflows/workflow-run-graph-canvas.tsx b/apps/dashboard/src/components/workflows/workflow-run-graph-canvas.tsx
new file mode 100644
index 0000000..286c418
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/workflow-run-graph-canvas.tsx
@@ -0,0 +1,470 @@
+import { Background, type Node, ReactFlow, useNodesState } from "@xyflow/react";
+import "@xyflow/react/dist/style.css";
+import { cn } from "@diffkit/ui/lib/utils";
+import { useCallback, useEffect, useMemo, useRef, useState } from "react";
+import type { GitHubQueryScope } from "#/lib/github.query";
+import type {
+ WorkflowDefinition,
+ WorkflowRun,
+ WorkflowRunJob,
+} from "#/lib/github.types";
+import { buildLayoutFromDefinition } from "./graph/build-layout";
+import {
+ COLUMN_GAP,
+ MATRIX_SUFFIX_RE,
+ NODE_WIDTH,
+ ROW_GAP,
+ STEP_LOG_GAP,
+ STEP_LOG_HEIGHT,
+ VARIANT_POPUP_GAP,
+} from "./graph/constants";
+import { collectConnectedEdgeIds } from "./graph/edges";
+import {
+ type GraphConfig,
+ GraphConfigProvider,
+} from "./graph/graph-config-context";
+import { GraphControls } from "./graph/graph-controls";
+import {
+ buildColumns,
+ getAggregateState,
+ getGroupId,
+ groupJobs,
+} from "./graph/grouping";
+import { estimateNodeHeight } from "./graph/height";
+import { NodeHoverProvider } from "./graph/hover-context";
+import { JobNode } from "./graph/job-node";
+import { MatrixNode } from "./graph/matrix-node";
+import {
+ getStepLogNodeId,
+ type OpenStepLogInput,
+ type StepLogActions,
+ StepLogProvider,
+} from "./graph/step-log-context";
+import { StepLogNode } from "./graph/step-log-node";
+import { NodeToggleProvider } from "./graph/toggle-context";
+import type {
+ FlowNode,
+ GraphEdge,
+ JobNodeData,
+ MatrixNodeData,
+ StepLogNodeData,
+} from "./graph/types";
+
+const nodeTypes = {
+ job: JobNode,
+ matrix: MatrixNode,
+ stepLog: StepLogNode,
+};
+
+const FIT_VIEW_OPTIONS = { padding: 0.25, maxZoom: 1 };
+const PRO_OPTIONS = { hideAttribution: true };
+const DEFAULT_EDGE_OPTIONS = { type: "smoothstep" as const };
+
+function getPopupNodeId(matrixId: string, jobId: number): string {
+ return `variant-${matrixId}-${jobId}`;
+}
+
+export function WorkflowRunGraphCanvas({
+ run,
+ jobs,
+ definition,
+ scope,
+ owner,
+ repo,
+ runId,
+}: {
+ run: WorkflowRun;
+ jobs: WorkflowRunJob[];
+ definition: WorkflowDefinition | null;
+ scope: GitHubQueryScope;
+ owner: string;
+ repo: string;
+ runId: number;
+}) {
+ const workflowFilename = useMemo(() => {
+ const segments = run.path.split("/");
+ return segments[segments.length - 1] || run.path;
+ }, [run.path]);
+
+ const graphConfig = useMemo(
+ () => ({ scope, owner, repo, runId }),
+ [scope, owner, repo, runId],
+ );
+
+ const collectDefaultCollapsedIds = useCallback(
+ (jobs: WorkflowRunJob[], definition: WorkflowDefinition | null) => {
+ const ids = new Set();
+ for (const job of jobs) {
+ const match = MATRIX_SUFFIX_RE.exec(job.name);
+ if (match) {
+ ids.add(`matrix-${match[1]}`);
+ ids.add(`def-${match[1]}`);
+ ids.add(getPopupNodeId(`matrix-${match[1]}`, job.id));
+ ids.add(getPopupNodeId(`def-${match[1]}`, job.id));
+ } else {
+ ids.add(`job-${job.id}`);
+ }
+ }
+ if (definition) {
+ for (const yamlJob of definition.jobs) {
+ ids.add(`def-${yamlJob.key}`);
+ }
+ }
+ return ids;
+ },
+ [],
+ );
+
+ const [collapsedIds, setCollapsedIds] = useState>(() =>
+ collectDefaultCollapsedIds(jobs, definition),
+ );
+
+ const autoCollapsedRef = useRef>(new Set(collapsedIds));
+ useEffect(() => {
+ const desired = collectDefaultCollapsedIds(jobs, definition);
+ const toAdd: string[] = [];
+ for (const id of desired) {
+ if (!autoCollapsedRef.current.has(id)) {
+ toAdd.push(id);
+ autoCollapsedRef.current.add(id);
+ }
+ }
+ if (toAdd.length === 0) return;
+ setCollapsedIds((prev) => {
+ const next = new Set(prev);
+ for (const id of toAdd) next.add(id);
+ return next;
+ });
+ }, [jobs, definition, collectDefaultCollapsedIds]);
+
+ const toggleCollapsed = useCallback((nodeId: string) => {
+ setCollapsedIds((prev) => {
+ const next = new Set(prev);
+ if (next.has(nodeId)) next.delete(nodeId);
+ else next.add(nodeId);
+ return next;
+ });
+ }, []);
+
+ const [openStepLogs, setOpenStepLogs] = useState([]);
+ const stepLogActions = useMemo(
+ () => ({
+ open: (input) => {
+ setOpenStepLogs((prev) => {
+ if (
+ prev.some(
+ (l) =>
+ l.jobId === input.jobId && l.stepNumber === input.stepNumber,
+ )
+ ) {
+ return prev;
+ }
+ return [...prev, input];
+ });
+ },
+ close: (nodeId) => {
+ setOpenStepLogs((prev) =>
+ prev.filter(
+ (l) => getStepLogNodeId(l.jobId, l.stepNumber) !== nodeId,
+ ),
+ );
+ },
+ }),
+ [],
+ );
+
+ useEffect(() => {
+ setOpenStepLogs((prev) => {
+ const jobById = new Map(jobs.map((j) => [j.id, j]));
+ const next = prev.filter((l) => {
+ const job = jobById.get(l.jobId);
+ if (!job) return false;
+ return job.steps.some((s) => s.number === l.stepNumber);
+ });
+ return next.length === prev.length ? prev : next;
+ });
+ }, [jobs]);
+
+ const [hoveredNodeId, setHoveredNodeId] = useState(null);
+ const [stepLogPositions, setStepLogPositions] = useState<
+ Record
+ >({});
+
+ useEffect(() => {
+ setStepLogPositions((prev) => {
+ const active = new Set(
+ openStepLogs.map((l) => getStepLogNodeId(l.jobId, l.stepNumber)),
+ );
+ let changed = false;
+ const next: typeof prev = {};
+ for (const [key, value] of Object.entries(prev)) {
+ if (active.has(key)) next[key] = value;
+ else changed = true;
+ }
+ return changed ? next : prev;
+ });
+ }, [openStepLogs]);
+
+ const { nodes: computedNodes, baseEdges } = useMemo(() => {
+ let builtNodes: FlowNode[] = [];
+ let builtEdges: GraphEdge[] = [];
+
+ if (definition) {
+ const layout = buildLayoutFromDefinition(jobs, definition, collapsedIds);
+ if (layout) {
+ builtNodes = layout.nodes;
+ builtEdges = layout.edges;
+ }
+ }
+
+ if (builtNodes.length === 0) {
+ const groups = groupJobs(jobs);
+ const columns = buildColumns(groups);
+
+ columns.forEach((column, colIndex) => {
+ const x = colIndex * (NODE_WIDTH + COLUMN_GAP);
+ let currentY = 0;
+ for (const group of column) {
+ const nodeId = getGroupId(group);
+ let flowNode: FlowNode;
+ if (group.kind === "matrix") {
+ flowNode = {
+ id: nodeId,
+ type: "matrix",
+ position: { x, y: currentY },
+ data: {
+ baseName: group.baseName,
+ jobs: group.jobs,
+ aggregate: getAggregateState(group.jobs),
+ collapsed: collapsedIds.has(nodeId),
+ },
+ } satisfies Node;
+ } else {
+ flowNode = {
+ id: nodeId,
+ type: "job",
+ position: { x, y: currentY },
+ data: {
+ job: group.job,
+ collapsed: collapsedIds.has(nodeId),
+ },
+ } satisfies Node;
+ }
+ builtNodes.push(flowNode);
+ currentY += estimateNodeHeight(flowNode) + ROW_GAP;
+ }
+ });
+ }
+
+ const matrixNodes = builtNodes.filter(
+ (n): n is Node => n.type === "matrix",
+ );
+ for (const matrix of matrixNodes) {
+ const popupX =
+ matrix.position.x + NODE_WIDTH + COLUMN_GAP + VARIANT_POPUP_GAP;
+ let popupY = matrix.position.y;
+ for (const job of matrix.data.jobs) {
+ const popupId = getPopupNodeId(matrix.id, job.id);
+ const popupNode = {
+ id: popupId,
+ type: "job",
+ position: { x: popupX, y: popupY },
+ data: {
+ job,
+ collapsed: collapsedIds.has(popupId),
+ },
+ } satisfies Node;
+ builtNodes.push(popupNode);
+ builtEdges.push({
+ id: `${matrix.id}->${popupId}`,
+ source: matrix.id,
+ target: popupId,
+ type: "smoothstep",
+ });
+ popupY += estimateNodeHeight(popupNode) + ROW_GAP;
+ }
+ }
+
+ if (openStepLogs.length > 0) {
+ let maxRight = 0;
+ for (const n of builtNodes) {
+ maxRight = Math.max(maxRight, n.position.x + NODE_WIDTH);
+ }
+ const stepLogX = maxRight + COLUMN_GAP + STEP_LOG_GAP;
+ const logsByNodeId = new Map();
+ for (const log of openStepLogs) {
+ const arr = logsByNodeId.get(log.sourceNodeId) ?? [];
+ arr.push(log);
+ logsByNodeId.set(log.sourceNodeId, arr);
+ }
+ let currentY = 0;
+ for (const [sourceNodeId, logs] of logsByNodeId) {
+ for (const log of logs) {
+ const nodeId = getStepLogNodeId(log.jobId, log.stepNumber);
+ const job = jobs.find((j) => j.id === log.jobId);
+ const step = job?.steps.find((s) => s.number === log.stepNumber);
+ if (!job || !step) continue;
+ const persisted = stepLogPositions[nodeId];
+ const stepLogNode = {
+ id: nodeId,
+ type: "stepLog",
+ position: persisted ?? { x: stepLogX, y: currentY },
+ draggable: true,
+ data: {
+ jobId: job.id,
+ jobStatus: job.status,
+ stepNumber: step.number,
+ stepName: step.name,
+ stepStatus: step.status,
+ stepConclusion: step.conclusion,
+ stepStartedAt: step.startedAt,
+ stepCompletedAt: step.completedAt,
+ },
+ } satisfies Node;
+ builtNodes.push(stepLogNode);
+ builtEdges.push({
+ id: `${sourceNodeId}->${nodeId}`,
+ source: sourceNodeId,
+ target: nodeId,
+ type: "smoothstep",
+ });
+ currentY += STEP_LOG_HEIGHT + ROW_GAP;
+ }
+ }
+ }
+
+ return { nodes: builtNodes, baseEdges: builtEdges };
+ }, [jobs, definition, collapsedIds, openStepLogs, stepLogPositions]);
+
+ const [internalNodes, setInternalNodes, onNodesChange] =
+ useNodesState([]);
+
+ useEffect(() => {
+ setInternalNodes((prev) => {
+ const prevById = new Map(prev.map((n) => [n.id, n]));
+ return computedNodes.map((node) => {
+ if (node.type !== "stepLog") return node;
+ const prevNode = prevById.get(node.id);
+ if (prevNode && prevNode.type === "stepLog") {
+ return { ...node, position: prevNode.position };
+ }
+ return node;
+ });
+ });
+ }, [computedNodes, setInternalNodes]);
+
+ const edges = useMemo(() => {
+ if (!hoveredNodeId) {
+ return baseEdges.map((edge) => ({
+ ...edge,
+ style: { stroke: "var(--color-border)", strokeWidth: 1.5 },
+ }));
+ }
+ const connected = collectConnectedEdgeIds(hoveredNodeId, baseEdges);
+ return baseEdges.map((edge) => {
+ const isConnected = connected.has(edge.id);
+ return {
+ ...edge,
+ animated: isConnected,
+ zIndex: isConnected ? 1000 : 0,
+ style: {
+ stroke: isConnected
+ ? "var(--color-foreground)"
+ : "var(--color-border)",
+ strokeWidth: isConnected ? 2 : 1.5,
+ opacity: isConnected ? 1 : 0.35,
+ },
+ };
+ });
+ }, [baseEdges, hoveredNodeId]);
+
+ const containerRef = useRef(null);
+ const [isFullscreen, setIsFullscreen] = useState(false);
+ useEffect(() => {
+ const handler = () =>
+ setIsFullscreen(document.fullscreenElement === containerRef.current);
+ document.addEventListener("fullscreenchange", handler);
+ return () => document.removeEventListener("fullscreenchange", handler);
+ }, []);
+ const toggleFullscreen = useCallback(() => {
+ const el = containerRef.current;
+ if (!el) return;
+ if (document.fullscreenElement) {
+ void document.exitFullscreen();
+ } else {
+ void el.requestFullscreen();
+ }
+ }, []);
+
+ const onNodeMouseEnter = useCallback(
+ (_e: React.MouseEvent, node: Node) => setHoveredNodeId(node.id),
+ [],
+ );
+ const onNodeMouseLeave = useCallback(() => setHoveredNodeId(null), []);
+
+ const onNodeDragStop = useCallback((_e: React.MouseEvent, node: Node) => {
+ if (node.type !== "stepLog") return;
+ setStepLogPositions((prev) => ({
+ ...prev,
+ [node.id]: { x: node.position.x, y: node.position.y },
+ }));
+ }, []);
+
+ return (
+
+
+ {internalNodes.length === 0 ? (
+
No jobs yet.
+ ) : (
+
+
+
+
+
+
+
+
+
+
+
+
+ )}
+
+
+
+
{workflowFilename}
+
on: {run.event}
+
+
+ );
+}
diff --git a/apps/dashboard/src/components/workflows/workflow-run-graph.tsx b/apps/dashboard/src/components/workflows/workflow-run-graph.tsx
new file mode 100644
index 0000000..c991ac5
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/workflow-run-graph.tsx
@@ -0,0 +1,63 @@
+import { Skeleton } from "@diffkit/ui/components/skeleton";
+import { lazy, Suspense } from "react";
+import type { GitHubQueryScope } from "#/lib/github.query";
+import type {
+ WorkflowDefinition,
+ WorkflowRun,
+ WorkflowRunJob,
+} from "#/lib/github.types";
+import { useHasMounted } from "#/lib/use-has-mounted";
+
+const WorkflowRunGraphCanvas = lazy(() =>
+ import("./workflow-run-graph-canvas").then((mod) => ({
+ default: mod.WorkflowRunGraphCanvas,
+ })),
+);
+
+export function WorkflowRunGraph({
+ run,
+ jobs,
+ definition,
+ scope,
+ owner,
+ repo,
+ runId,
+}: {
+ run: WorkflowRun;
+ jobs: WorkflowRunJob[];
+ definition: WorkflowDefinition | null;
+ scope: GitHubQueryScope;
+ owner: string;
+ repo: string;
+ runId: number;
+}) {
+ const hasMounted = useHasMounted();
+
+ if (!hasMounted) return ;
+
+ return (
+ }>
+
+
+ );
+}
+
+function GraphPlaceholder() {
+ return (
+
+ );
+}
diff --git a/apps/dashboard/src/components/workflows/workflow-run-header.tsx b/apps/dashboard/src/components/workflows/workflow-run-header.tsx
new file mode 100644
index 0000000..19b5067
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/workflow-run-header.tsx
@@ -0,0 +1,202 @@
+import {
+ ChevronLeftIcon,
+ ExternalLinkIcon,
+ MoreHorizontalIcon,
+ RefreshCwIcon,
+} from "@diffkit/icons";
+import { Button } from "@diffkit/ui/components/button";
+import {
+ DropdownMenu,
+ DropdownMenuContent,
+ DropdownMenuItem,
+ DropdownMenuTrigger,
+} from "@diffkit/ui/components/dropdown-menu";
+import { toast } from "@diffkit/ui/components/sonner";
+import { Spinner } from "@diffkit/ui/components/spinner";
+import { useQueryClient } from "@tanstack/react-query";
+import { Link } from "@tanstack/react-router";
+import { useState } from "react";
+import {
+ CheckStateIcon,
+ getCheckState,
+} from "#/components/checks/check-state-icon";
+import {
+ rerunFailedWorkflowJobs,
+ rerunWorkflowRun,
+} from "#/lib/github.functions";
+import { type GitHubQueryScope, githubQueryKeys } from "#/lib/github.query";
+import type { WorkflowRun } from "#/lib/github.types";
+
+export function WorkflowRunHeader({
+ owner,
+ repo,
+ run,
+ pullRequestNumber,
+ scope,
+}: {
+ owner: string;
+ repo: string;
+ run: WorkflowRun;
+ pullRequestNumber: number | null;
+ scope: GitHubQueryScope;
+}) {
+ const queryClient = useQueryClient();
+ const [rerunPending, setRerunPending] = useState<"all" | "failed" | null>(
+ null,
+ );
+
+ const state = getCheckState(run);
+ const failedOnly = state === "failure";
+ const canRerun = run.viewerCanRerun;
+
+ const invalidateAfterRerun = async () => {
+ const runInput = { owner, repo, runId: run.id };
+ await Promise.all([
+ queryClient.invalidateQueries({
+ queryKey: githubQueryKeys.actions.workflowRun(scope, runInput),
+ }),
+ queryClient.invalidateQueries({
+ queryKey: githubQueryKeys.actions.workflowRunJobs(scope, runInput),
+ }),
+ queryClient.invalidateQueries({
+ queryKey: githubQueryKeys.actions.workflowRunArtifacts(scope, runInput),
+ }),
+ pullRequestNumber
+ ? queryClient.invalidateQueries({
+ queryKey: githubQueryKeys.pulls.status(scope, {
+ owner,
+ repo,
+ pullNumber: pullRequestNumber,
+ }),
+ })
+ : Promise.resolve(),
+ ]);
+ };
+
+ const handleRerun = async (mode: "all" | "failed") => {
+ setRerunPending(mode);
+ try {
+ const result =
+ mode === "failed"
+ ? await rerunFailedWorkflowJobs({
+ data: { owner, repo, runId: run.id },
+ })
+ : await rerunWorkflowRun({
+ data: { owner, repo, runId: run.id },
+ });
+ if (result.ok) {
+ toast.success(
+ mode === "failed" ? "Re-running failed jobs" : "Re-running all jobs",
+ );
+ await invalidateAfterRerun();
+ } else {
+ toast.error(result.error);
+ }
+ } catch {
+ toast.error("Failed to re-run workflow");
+ } finally {
+ setRerunPending(null);
+ }
+ };
+
+ return (
+
+ {pullRequestNumber != null ? (
+
+
+ Back to pull request #{pullRequestNumber}
+
+ ) : (
+
+
+ {owner}/{repo}
+
+ /
+ Actions
+ /
+ #{run.runNumber}
+
+ )}
+
+
+
+
+
+
+
+ {run.displayTitle}
+
+ #{run.runNumber}
+
+
+ {run.name && run.name !== run.displayTitle ? (
+
{run.name}
+ ) : null}
+
+
+ {canRerun ? (
+
+ ) : null}
+
+
+
+
+
+ {canRerun ? (
+ <>
+ handleRerun("failed")}
+ >
+
+ Re-run failed jobs
+
+ handleRerun("all")}
+ >
+
+ Re-run all jobs
+
+ </>
+ ) : null}
+
+
+
+ View on GitHub
+
+
+
+
+
+
+
+ );
+}
diff --git a/apps/dashboard/src/components/workflows/workflow-run-page.tsx b/apps/dashboard/src/components/workflows/workflow-run-page.tsx
new file mode 100644
index 0000000..3002b08
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/workflow-run-page.tsx
@@ -0,0 +1,193 @@
+import { Skeleton } from "@diffkit/ui/components/skeleton";
+import { useQuery } from "@tanstack/react-query";
+import { getRouteApi } from "@tanstack/react-router";
+import { useMemo } from "react";
+import {
+ getCheckState,
+ getCheckStateColor,
+} from "#/components/checks/check-state-icon";
+import {
+ DetailPageLayout,
+ DetailPageSkeletonLayout,
+ StaggerItem,
+} from "#/components/details/detail-page";
+import {
+ githubQueryKeys,
+ githubViewerQueryOptions,
+ githubWorkflowDefinitionQueryOptions,
+ githubWorkflowRunArtifactsQueryOptions,
+ githubWorkflowRunJobsQueryOptions,
+ githubWorkflowRunQueryOptions,
+} from "#/lib/github.query";
+import { githubRevalidationSignalKeys } from "#/lib/github-revalidation";
+import { useGitHubSignalStream } from "#/lib/use-github-signal-stream";
+import { useHasMounted } from "#/lib/use-has-mounted";
+import { useRegisterTab } from "#/lib/use-register-tab";
+import { WorkflowRunArtifacts } from "./workflow-run-artifacts";
+import { WorkflowRunGraph } from "./workflow-run-graph";
+import { WorkflowRunHeader } from "./workflow-run-header";
+import { WorkflowRunSidebar } from "./workflow-run-sidebar";
+import { WorkflowRunSummary } from "./workflow-run-summary";
+
+const routeApi = getRouteApi("/_protected/$owner/$repo/actions/runs/$runId");
+
+export function WorkflowRunPage() {
+ const { user } = routeApi.useRouteContext();
+ const { owner, repo, runId } = routeApi.useParams();
+ const { pr: prNumberFromSearch } = routeApi.useSearch();
+
+ const runIdNum = Number(runId);
+ const scope = useMemo(() => ({ userId: user.id }), [user.id]);
+ const input = useMemo(
+ () => ({ owner, repo, runId: runIdNum }),
+ [owner, repo, runIdNum],
+ );
+ const hasMounted = useHasMounted();
+
+ const webhookRefreshTargets = useMemo(() => {
+ const runSignals = [githubRevalidationSignalKeys.workflowRunEntity(input)];
+ return [
+ {
+ queryKey: githubQueryKeys.actions.workflowRun(scope, input),
+ signalKeys: runSignals,
+ },
+ {
+ queryKey: githubQueryKeys.actions.workflowRunJobs(scope, input),
+ signalKeys: runSignals,
+ },
+ {
+ queryKey: githubQueryKeys.actions.workflowRunArtifacts(scope, input),
+ signalKeys: runSignals,
+ },
+ ];
+ }, [scope, input]);
+ useGitHubSignalStream(webhookRefreshTargets);
+
+ const runQuery = useQuery({
+ ...githubWorkflowRunQueryOptions(scope, input),
+ enabled: hasMounted,
+ });
+ const jobsQuery = useQuery({
+ ...githubWorkflowRunJobsQueryOptions(scope, input),
+ enabled: hasMounted,
+ });
+ const artifactsQuery = useQuery({
+ ...githubWorkflowRunArtifactsQueryOptions(scope, input),
+ enabled: hasMounted,
+ });
+ useQuery({
+ ...githubViewerQueryOptions(scope),
+ enabled: hasMounted,
+ });
+
+ const definitionInput = useMemo(
+ () => ({
+ owner,
+ repo,
+ path: runQuery.data?.path ?? "",
+ ref: runQuery.data?.headSha ?? "",
+ }),
+ [owner, repo, runQuery.data?.path, runQuery.data?.headSha],
+ );
+ const definitionQuery = useQuery({
+ ...githubWorkflowDefinitionQueryOptions(scope, definitionInput),
+ enabled: hasMounted && !!runQuery.data,
+ });
+
+ useRegisterTab(
+ runQuery.data
+ ? {
+ type: "actions",
+ title:
+ runQuery.data.displayTitle ||
+ runQuery.data.name ||
+ `Run #${runQuery.data.runNumber}`,
+ number: runQuery.data.runNumber,
+ url: `/${owner}/${repo}/actions/runs/${runIdNum}`,
+ repo: `${owner}/${repo}`,
+ iconColor: getCheckStateColor(getCheckState(runQuery.data)),
+ tabId: `actions:${owner}/${repo}/run/${runIdNum}`,
+ }
+ : null,
+ );
+
+ if (runQuery.error) throw runQuery.error;
+ const run = runQuery.data;
+ if (!run) return ;
+
+ const jobs = jobsQuery.data ?? [];
+ const artifacts = artifactsQuery.data ?? [];
+ const definition = definitionQuery.data ?? null;
+ const pullRequestNumber =
+ prNumberFromSearch ?? run.pullRequests[0]?.number ?? null;
+
+ return (
+
+
+
+
+
+ >
+ }
+ sidebar={
+
+ }
+ />
+ );
+}
+
+function WorkflowRunPageSkeleton() {
+ return (
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ );
+}
diff --git a/apps/dashboard/src/components/workflows/workflow-run-row.tsx b/apps/dashboard/src/components/workflows/workflow-run-row.tsx
new file mode 100644
index 0000000..2fbed21
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/workflow-run-row.tsx
@@ -0,0 +1,141 @@
+import { GitBranchIcon } from "@diffkit/icons";
+import { cn } from "@diffkit/ui/lib/utils";
+import { Link, useRouter } from "@tanstack/react-router";
+import { memo, useEffect, useMemo, useState } from "react";
+import {
+ CheckStateIcon,
+ getCheckState,
+} from "#/components/checks/check-state-icon";
+import { formatRelativeTime } from "#/lib/format-relative-time";
+import type { WorkflowRun } from "#/lib/github.types";
+import { preloadRouteOnce } from "#/lib/route-preload";
+import { formatDuration } from "./graph/format";
+
+export const WorkflowRunRow = memo(function WorkflowRunRow({
+ run,
+ owner,
+ repo,
+}: {
+ run: WorkflowRun;
+ owner: string;
+ repo: string;
+}) {
+ const state = getCheckState(run);
+ const isLive = state === "pending" || state === "waiting";
+ const router = useRouter();
+
+ const linkParams = useMemo(
+ () => ({ owner, repo, runId: String(run.id) }),
+ [owner, repo, run.id],
+ );
+
+ const preloadDetail = () => {
+ void preloadRouteOnce(router, `/${owner}/${repo}/actions/runs/${run.id}`);
+ };
+
+ const workflowName = useMemo(
+ () =>
+ run.name ??
+ run.path
+ .split("/")
+ .pop()
+ ?.replace(/\.ya?ml$/, "") ??
+ "Workflow",
+ [run.name, run.path],
+ );
+
+ return (
+
+
+
+
+
+
{run.displayTitle}
+
+ {workflowName}
+ ·
+ #{run.runNumber}
+ {run.headBranch ? (
+ <>
+ ·
+
+
+ {run.headBranch}
+
+ </>
+ ) : null}
+ ·
+ {run.event}
+ {run.actor ? (
+ <>
+ ·
+
+ {run.actor.login}
+ </>
+ ) : null}
+ ·
+ {formatRelativeTime(run.updatedAt)}
+
+
+
+
+ );
+});
+
+/** Renders the duration — subscribes to a 1s tick only when the run is live. */
+const RunDuration = memo(function RunDuration({
+ startedAt,
+ completedAt,
+ isLive,
+}: {
+ startedAt: string | null;
+ completedAt: string | null;
+ isLive: boolean;
+}) {
+ const [nowTick, setNowTick] = useState(() => Date.now());
+
+ useEffect(() => {
+ if (!isLive) return;
+ const id = setInterval(() => setNowTick(Date.now()), 1000);
+ return () => clearInterval(id);
+ }, [isLive]);
+
+ const duration = formatDuration(
+ startedAt,
+ completedAt,
+ isLive ? nowTick : undefined,
+ );
+ if (!duration) return null;
+
+ return (
+
+ {isLive ? (
+
+ ) : null}
+
+ {duration}
+
+
+ );
+});
diff --git a/apps/dashboard/src/components/workflows/workflow-run-sidebar.tsx b/apps/dashboard/src/components/workflows/workflow-run-sidebar.tsx
new file mode 100644
index 0000000..64a2db0
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/workflow-run-sidebar.tsx
@@ -0,0 +1,95 @@
+import { FilterIcon } from "@diffkit/icons";
+import { Skeleton } from "@diffkit/ui/components/skeleton";
+import { cn } from "@diffkit/ui/lib/utils";
+import { Link } from "@tanstack/react-router";
+import {
+ CheckStateIcon,
+ getCheckState,
+} from "#/components/checks/check-state-icon";
+import {
+ DetailSidebar,
+ DetailSidebarSection,
+} from "#/components/details/detail-sidebar";
+import type { WorkflowRunJob } from "#/lib/github.types";
+
+export function WorkflowRunSidebar({
+ jobs,
+ isJobsLoading,
+ owner,
+ repo,
+ runId,
+ activeJobId = null,
+}: {
+ jobs: WorkflowRunJob[];
+ isJobsLoading: boolean;
+ owner: string;
+ repo: string;
+ runId: number;
+ activeJobId?: number | null;
+}) {
+ return (
+
+
+
+
+ }
+ >
+
+ {isJobsLoading && jobs.length === 0 ? (
+
+ ) : jobs.length === 0 ? (
+
+ No jobs yet.
+
+ ) : (
+ jobs.map((job) => {
+ const state = getCheckState(job);
+ const isActive = job.id === activeJobId;
+ return (
+
+
+
{job.name}
+
+ );
+ })
+ )}
+
+
+
+ );
+}
+
+function JobListSkeleton() {
+ return (
+
+ {[0, 1, 2].map((i) => (
+
+
+
+
+ ))}
+
+ );
+}
diff --git a/apps/dashboard/src/components/workflows/workflow-run-summary.tsx b/apps/dashboard/src/components/workflows/workflow-run-summary.tsx
new file mode 100644
index 0000000..ea4e443
--- /dev/null
+++ b/apps/dashboard/src/components/workflows/workflow-run-summary.tsx
@@ -0,0 +1,213 @@
+import { AlertCircleIcon, ClockIcon, PackageIcon } from "@diffkit/icons";
+import { Skeleton } from "@diffkit/ui/components/skeleton";
+import {
+ StatePill,
+ type StatePillTone,
+} from "@diffkit/ui/components/state-pill";
+import { Link } from "@tanstack/react-router";
+import type { ComponentType, ReactNode, SVGProps } from "react";
+import {
+ CheckStateIcon,
+ getCheckState,
+} from "#/components/checks/check-state-icon";
+import { CopyBadge } from "#/components/shared/copy-badge";
+import { formatRelativeTime } from "#/lib/format-relative-time";
+import type {
+ WorkflowRun,
+ WorkflowRunArtifact,
+ WorkflowRunJob,
+} from "#/lib/github.types";
+import { useNow } from "#/lib/use-now";
+
+export function WorkflowRunSummary({
+ run,
+ jobs,
+ artifacts,
+ isJobsLoading,
+}: {
+ run: WorkflowRun;
+ jobs: WorkflowRunJob[];
+ artifacts: WorkflowRunArtifact[];
+ isJobsLoading: boolean;
+}) {
+ const triggerTime = run.runStartedAt ?? run.createdAt;
+ const isRunLive = run.status !== "completed";
+ const staticDurationMs = isRunLive ? null : getCompletedDurationMs(run, jobs);
+
+ return (
+
+
+
+ Triggered via {run.event}
+
+
+ {run.triggeringActor ? (
+ <>
+

+
+ {run.triggeringActor.login}
+
+ </>
+ ) : (
+
+ Unknown actor
+
+ )}
+ {run.pullRequests.length > 0 && run.pullRequests[0] ? (
+ <>
+
+ opened #{run.pullRequests[0].number}
+
+
+ </>
+ ) : run.headBranch ? (
+
+ ) : null}
+
+
+
+
}
+ />
+
+ ) : staticDurationMs == null && isJobsLoading ? (
+
+ ) : (
+ formatDuration(staticDurationMs)
+ )
+ }
+ />
+
0 ? (
+
+ {artifacts.length}
+
+ ) : (
+ 0
+ )
+ }
+ />
+
+ );
+}
+
+function RelativeTime({ dateStr }: { dateStr: string }) {
+ const now = useNow();
+ return <>{formatRelativeTime(dateStr, now)}</>;
+}
+
+function LiveTotalDuration({ run }: { run: WorkflowRun }) {
+ const now = useNow();
+ const start = run.runStartedAt ?? run.createdAt;
+ if (!start) return <>—</>;
+ const startMs = new Date(start).getTime();
+ if (Number.isNaN(startMs)) return <>—</>;
+ return <>{formatDuration(Math.max(0, now - startMs))}</>;
+}
+
+type IconComponent = ComponentType<
+ SVGProps & { size?: number; strokeWidth?: number }
+>;
+
+function InfoCell({
+ label,
+ icon: Icon,
+ value,
+}: {
+ label: string;
+ icon: IconComponent;
+ value: ReactNode;
+}) {
+ return (
+
+
+
+ {label}
+
+ {value}
+
+ );
+}
+
+function StatusPill({ run }: { run: WorkflowRun }) {
+ const state = getCheckState(run);
+ const tone: StatePillTone =
+ state === "success" ? "open" : state === "failure" ? "closed" : "muted";
+ return (
+
+
+ {formatStatus(run)}
+
+ );
+}
+
+function formatStatus(run: WorkflowRun): string {
+ if (run.status === "completed") {
+ const c = run.conclusion;
+ if (c === "success") return "Success";
+ if (c === "failure") return "Failure";
+ if (c === "cancelled") return "Cancelled";
+ if (c === "skipped") return "Skipped";
+ if (c === "timed_out") return "Timed out";
+ if (c === "action_required") return "Action required";
+ if (c === "neutral") return "Neutral";
+ return c ?? "Completed";
+ }
+ if (run.status === "in_progress") return "In progress";
+ if (run.status === "queued") return "Queued";
+ if (run.status === "waiting") return "Waiting";
+ if (run.status === "pending") return "Pending";
+ if (run.status === "requested") return "Requested";
+ return run.status;
+}
+
+function getCompletedDurationMs(
+ run: WorkflowRun,
+ jobs: WorkflowRunJob[],
+): number | null {
+ const start = run.runStartedAt ?? run.createdAt;
+ if (!start) return null;
+ const startMs = new Date(start).getTime();
+ if (Number.isNaN(startMs)) return null;
+
+ const jobEndTimes = jobs
+ .map((j) =>
+ j.completedAt ? new Date(j.completedAt).getTime() : Number.NaN,
+ )
+ .filter((t) => !Number.isNaN(t));
+ const endMs =
+ jobEndTimes.length > 0
+ ? Math.max(...jobEndTimes)
+ : new Date(run.updatedAt).getTime();
+ if (Number.isNaN(endMs)) return null;
+ return Math.max(0, endMs - startMs);
+}
+
+function formatDuration(ms: number | null): string {
+ if (ms == null) return "—";
+ const totalSeconds = Math.floor(ms / 1000);
+ if (totalSeconds < 60) return `${totalSeconds}s`;
+ const minutes = Math.floor(totalSeconds / 60);
+ const seconds = totalSeconds % 60;
+ if (minutes < 60) return `${minutes}m ${seconds}s`;
+ const hours = Math.floor(minutes / 60);
+ return `${hours}h ${minutes % 60}m`;
+}
diff --git a/apps/dashboard/src/lib/format-relative-time.ts b/apps/dashboard/src/lib/format-relative-time.ts
index 6c3d312..3f543e2 100644
--- a/apps/dashboard/src/lib/format-relative-time.ts
+++ b/apps/dashboard/src/lib/format-relative-time.ts
@@ -1,5 +1,7 @@
-export function formatRelativeTime(dateStr: string): string {
- const seconds = Math.floor((Date.now() - new Date(dateStr).getTime()) / 1000);
+export function formatRelativeTime(dateStr: string, now?: number): string {
+ const dateMs = new Date(dateStr).getTime();
+ if (Number.isNaN(dateMs)) return "—";
+ const seconds = Math.floor(((now ?? Date.now()) - dateMs) / 1000);
if (seconds < 60) return "just now";
const minutes = Math.floor(seconds / 60);
if (minutes < 60) return `${minutes}m ago`;
diff --git a/apps/dashboard/src/lib/github-cache-policy.ts b/apps/dashboard/src/lib/github-cache-policy.ts
index 87e2194..6ed5862 100644
--- a/apps/dashboard/src/lib/github-cache-policy.ts
+++ b/apps/dashboard/src/lib/github-cache-policy.ts
@@ -32,6 +32,10 @@ export const githubCachePolicy = {
staleTimeMs: 15 * 1000,
gcTimeMs: 5 * 60 * 1000,
},
+ workflowRun: {
+ staleTimeMs: 15 * 1000,
+ gcTimeMs: 5 * 60 * 1000,
+ },
contributions: {
staleTimeMs: 60 * 60 * 1000,
gcTimeMs: 24 * 60 * 60 * 1000,
diff --git a/apps/dashboard/src/lib/github-revalidation.ts b/apps/dashboard/src/lib/github-revalidation.ts
index c5cf3c5..2c7d054 100644
--- a/apps/dashboard/src/lib/github-revalidation.ts
+++ b/apps/dashboard/src/lib/github-revalidation.ts
@@ -431,6 +431,14 @@ export function getGitHubRevalidationSignalKeysForTab(tab: Tab) {
];
}
+ if (tab.type === "actions") {
+ // `tab.number` is the human-readable run number (e.g. #42), not the API
+ // run_id, so we can't subscribe to a specific run/job entity here. The
+ // repo-wide actions signal covers the list view; per-entity refresh
+ // happens via useGitHubSignalStream calls in the page components.
+ return [githubRevalidationSignalKeys.actionsRepo({ owner, repo })];
+ }
+
if (tab.number == null) return [];
if (tab.type === "pull" || tab.type === "review") {
diff --git a/apps/dashboard/src/lib/github.functions.ts b/apps/dashboard/src/lib/github.functions.ts
index 8e48e29..580ed1f 100644
--- a/apps/dashboard/src/lib/github.functions.ts
+++ b/apps/dashboard/src/lib/github.functions.ts
@@ -1,5 +1,6 @@
import { createServerFn } from "@tanstack/react-start";
import { type Octokit as OctokitType, RequestError } from "octokit";
+import { parse as parseYaml } from "yaml";
import { debug } from "./debug";
import type {
BranchComparison,
@@ -60,6 +61,14 @@ import type {
TimelineEvent,
UserActivityEvent,
UserRepoSummary,
+ WorkflowDefinition,
+ WorkflowDefinitionJob,
+ WorkflowJobLogs,
+ WorkflowRun,
+ WorkflowRunArtifact,
+ WorkflowRunJob,
+ WorkflowRunLogsBundle,
+ WorkflowRunStep,
} from "./github.types";
import {
buildGitHubAppAuthorizePath,
@@ -3879,17 +3888,31 @@ async function computePullStatus(
const checkRuns = deduplicateCheckRuns(allCheckRuns);
const requiredContextSet = new Set(requiredContexts);
- const mappedCheckRuns: PullCheckRun[] = checkRuns.map((check) => ({
- id: check.id,
- name: check.name,
- status: check.status,
- conclusion: check.conclusion,
- appAvatarUrl: check.app?.owner?.avatar_url ?? null,
- outputTitle: check.output?.title ?? null,
- startedAt: check.started_at ?? null,
- htmlUrl: check.html_url ?? null,
- required: requiredContextSet.has(check.name),
- }));
+ const workflowRunIdByCheckSuiteId = new Map();
+ for (const run of allWorkflowRuns) {
+ if (run.check_suite_id != null) {
+ workflowRunIdByCheckSuiteId.set(run.check_suite_id, run.id);
+ }
+ }
+
+ const mappedCheckRuns: PullCheckRun[] = checkRuns.map((check) => {
+ const suiteId = check.check_suite?.id ?? null;
+ return {
+ id: check.id,
+ name: check.name,
+ status: check.status,
+ conclusion: check.conclusion,
+ appAvatarUrl: check.app?.owner?.avatar_url ?? null,
+ outputTitle: check.output?.title ?? null,
+ startedAt: check.started_at ?? null,
+ htmlUrl: check.html_url ?? null,
+ required: requiredContextSet.has(check.name),
+ workflowRunId:
+ suiteId != null
+ ? (workflowRunIdByCheckSuiteId.get(suiteId) ?? null)
+ : null,
+ };
+ });
// Commit statuses (e.g. CodeRabbit, CircleCI) — separate from Check Runs.
// GitHub's combined-status endpoint returns the latest status per context
@@ -3923,6 +3946,7 @@ async function computePullStatus(
startedAt: status.created_at ?? null,
htmlUrl: status.target_url ?? null,
required: requiredContextSet.has(status.context),
+ workflowRunId: null,
};
});
@@ -3943,6 +3967,7 @@ async function computePullStatus(
startedAt: null,
htmlUrl: null,
required: true,
+ workflowRunId: null,
}));
const combinedChecks: PullCheckRun[] = [
@@ -10019,3 +10044,804 @@ export const getRevalidationSignalTimestamps = createServerFn({
return getGitHubRevalidationSignals(data.signalKeys);
},
);
+
+export type WorkflowRunInput = {
+ owner: string;
+ repo: string;
+ runId: number;
+};
+
+export type WorkflowRunListStatus =
+ | "completed"
+ | "action_required"
+ | "cancelled"
+ | "failure"
+ | "neutral"
+ | "skipped"
+ | "stale"
+ | "success"
+ | "timed_out"
+ | "in_progress"
+ | "queued"
+ | "requested"
+ | "waiting"
+ | "pending";
+
+export type WorkflowRunsFromRepoInput = {
+ owner: string;
+ repo: string;
+ page?: number;
+ perPage?: number;
+ status?: WorkflowRunListStatus;
+ event?: string;
+ branch?: string;
+ actor?: string;
+ workflowId?: number;
+};
+
+type WorkflowRunRaw = Awaited<
+ ReturnType
+>["data"];
+type WorkflowRunJobRaw = Awaited<
+ ReturnType
+>["data"]["jobs"][number];
+type WorkflowRunStepRaw = NonNullable[number];
+type WorkflowRunArtifactRaw = Awaited<
+ ReturnType
+>["data"]["artifacts"][number];
+
+function mapWorkflowRunStep(raw: WorkflowRunStepRaw): WorkflowRunStep {
+ return {
+ number: raw.number,
+ name: raw.name,
+ status: raw.status,
+ conclusion: raw.conclusion ?? null,
+ startedAt: raw.started_at ?? null,
+ completedAt: raw.completed_at ?? null,
+ };
+}
+
+function mapWorkflowRunJob(raw: WorkflowRunJobRaw): WorkflowRunJob {
+ return {
+ id: raw.id,
+ runId: raw.run_id,
+ name: raw.name,
+ status: raw.status,
+ conclusion: raw.conclusion ?? null,
+ startedAt: raw.started_at ?? null,
+ completedAt: raw.completed_at ?? null,
+ htmlUrl: raw.html_url ?? null,
+ labels: raw.labels ?? [],
+ runnerName: raw.runner_name ?? null,
+ steps: (raw.steps ?? []).map(mapWorkflowRunStep),
+ };
+}
+
+function mapWorkflowRunArtifact(
+ raw: WorkflowRunArtifactRaw,
+): WorkflowRunArtifact {
+ return {
+ id: raw.id,
+ name: raw.name,
+ sizeInBytes: raw.size_in_bytes,
+ expired: raw.expired,
+ createdAt: raw.created_at ?? null,
+ expiresAt: raw.expires_at ?? null,
+ archiveDownloadUrl: raw.archive_download_url,
+ digest: raw.digest ?? null,
+ };
+}
+
+function mapWorkflowRun(
+ raw: WorkflowRunRaw,
+ options: { viewerCanRerun: boolean },
+): WorkflowRun {
+ return {
+ id: raw.id,
+ name: raw.name ?? null,
+ displayTitle: raw.display_title ?? raw.name ?? `Run #${raw.run_number}`,
+ status: raw.status ?? "queued",
+ conclusion: raw.conclusion ?? null,
+ event: raw.event,
+ headBranch: raw.head_branch ?? null,
+ headSha: raw.head_sha,
+ runNumber: raw.run_number,
+ runAttempt: raw.run_attempt ?? 1,
+ runStartedAt: raw.run_started_at ?? null,
+ createdAt: raw.created_at,
+ updatedAt: raw.updated_at,
+ htmlUrl: raw.html_url,
+ path: raw.path,
+ workflowId: raw.workflow_id,
+ actor: mapActor(raw.actor),
+ triggeringActor: mapActor(raw.triggering_actor),
+ pullRequests: (raw.pull_requests ?? []).map((pr) => ({
+ number: pr.number,
+ headRef: pr.head?.ref ?? "",
+ baseRef: pr.base?.ref ?? "",
+ })),
+ viewerCanRerun: options.viewerCanRerun,
+ };
+}
+
+export const getWorkflowRunsForRepo = createServerFn({ method: "GET" })
+ .inputValidator(identityValidator)
+ .handler(async ({ data }): Promise => {
+ const context = await getGitHubContextForRepository(data);
+ if (!context) {
+ return [];
+ }
+
+ const params = {
+ owner: data.owner,
+ repo: data.repo,
+ page: clampPage(data.page),
+ perPage: clampPerPage(data.perPage),
+ status: data.status,
+ event: data.event,
+ branch: data.branch,
+ actor: data.actor,
+ workflowId: data.workflowId,
+ };
+
+ return getCachedGitHubRequest<
+ Awaited<
+ ReturnType
+ >["data"],
+ WorkflowRun[]
+ >({
+ context,
+ resource: "actions.runs.repo",
+ params,
+ freshForMs: githubCachePolicy.list.staleTimeMs,
+ signalKeys: [
+ githubRevalidationSignalKeys.actionsRepo({
+ owner: data.owner,
+ repo: data.repo,
+ }),
+ ],
+ request: (headers) =>
+ context.octokit.rest.actions.listWorkflowRunsForRepo({
+ owner: data.owner,
+ repo: data.repo,
+ page: params.page,
+ per_page: params.perPage,
+ status: data.status,
+ event: data.event,
+ branch: data.branch,
+ actor: data.actor,
+ workflow_id: data.workflowId,
+ headers,
+ }),
+ mapData: (payload) =>
+ payload.workflow_runs.map((raw) =>
+ mapWorkflowRun(raw as unknown as WorkflowRunRaw, {
+ viewerCanRerun: false,
+ }),
+ ),
+ });
+ });
+
+export const getWorkflowRun = createServerFn({ method: "GET" })
+ .inputValidator(identityValidator)
+ .handler(async ({ data }): Promise => {
+ const context = await getGitHubContextForRepository(data);
+ if (!context) {
+ return null;
+ }
+
+ const userContext = await getGitHubUserContextForRepository(data);
+ const [run, userPerms, appPerms] = await Promise.all([
+ getCachedGitHubRequest({
+ context,
+ resource: "actions.run",
+ params: {
+ owner: data.owner,
+ repo: data.repo,
+ runId: data.runId,
+ },
+ freshForMs: githubCachePolicy.workflowRun.staleTimeMs,
+ signalKeys: [
+ githubRevalidationSignalKeys.workflowRunEntity({
+ owner: data.owner,
+ repo: data.repo,
+ runId: data.runId,
+ }),
+ ],
+ request: (headers) =>
+ context.octokit.rest.actions.getWorkflowRun({
+ owner: data.owner,
+ repo: data.repo,
+ run_id: data.runId,
+ headers,
+ }),
+ mapData: (payload) =>
+ mapWorkflowRun(payload, { viewerCanRerun: false }),
+ }).catch((error: unknown) => {
+ if (error instanceof RequestError && error.status === 404) {
+ return null;
+ }
+ throw error;
+ }),
+ getRepositoryPermissions(userContext, data.owner, data.repo),
+ getRepositoryPermissions(context, data.owner, data.repo),
+ ]);
+
+ if (!run) return null;
+
+ const permissions = mergeRepositoryPermissions(userPerms, appPerms);
+ const viewerCanRerun =
+ permissions?.push === true || permissions?.admin === true;
+ return { ...run, viewerCanRerun };
+ });
+
+export const listWorkflowRunJobs = createServerFn({ method: "GET" })
+ .inputValidator(identityValidator)
+ .handler(async ({ data }): Promise => {
+ const context = await getGitHubContextForRepository(data);
+ if (!context) {
+ return [];
+ }
+
+ try {
+ return await getCachedPaginatedGitHubRequest<
+ WorkflowRunJobRaw,
+ WorkflowRunJob[]
+ >({
+ context,
+ resource: "actions.run.jobs",
+ params: {
+ owner: data.owner,
+ repo: data.repo,
+ runId: data.runId,
+ },
+ freshForMs: githubCachePolicy.workflowRun.staleTimeMs,
+ signalKeys: [
+ githubRevalidationSignalKeys.workflowRunEntity({
+ owner: data.owner,
+ repo: data.repo,
+ runId: data.runId,
+ }),
+ ],
+ request: async (page) => {
+ const response =
+ await context.octokit.rest.actions.listJobsForWorkflowRun({
+ owner: data.owner,
+ repo: data.repo,
+ run_id: data.runId,
+ page,
+ per_page: 100,
+ });
+ return {
+ ...response,
+ data: response.data.jobs ?? [],
+ };
+ },
+ mapData: (items) => items.map(mapWorkflowRunJob),
+ });
+ } catch (error) {
+ console.error("[listWorkflowRunJobs]", error);
+ return [];
+ }
+ });
+
+export const listWorkflowRunArtifacts = createServerFn({ method: "GET" })
+ .inputValidator(identityValidator)
+ .handler(async ({ data }): Promise => {
+ const context = await getGitHubContextForRepository(data);
+ if (!context) {
+ return [];
+ }
+
+ try {
+ return await getCachedPaginatedGitHubRequest<
+ WorkflowRunArtifactRaw,
+ WorkflowRunArtifact[]
+ >({
+ context,
+ resource: "actions.run.artifacts",
+ params: {
+ owner: data.owner,
+ repo: data.repo,
+ runId: data.runId,
+ },
+ freshForMs: githubCachePolicy.workflowRun.staleTimeMs,
+ signalKeys: [
+ githubRevalidationSignalKeys.workflowRunEntity({
+ owner: data.owner,
+ repo: data.repo,
+ runId: data.runId,
+ }),
+ ],
+ request: async (page) => {
+ const response =
+ await context.octokit.rest.actions.listWorkflowRunArtifacts({
+ owner: data.owner,
+ repo: data.repo,
+ run_id: data.runId,
+ page,
+ per_page: 100,
+ });
+ return {
+ ...response,
+ data: response.data.artifacts ?? [],
+ };
+ },
+ mapData: (items) => items.map(mapWorkflowRunArtifact),
+ });
+ } catch (error) {
+ console.error("[listWorkflowRunArtifacts]", error);
+ return [];
+ }
+ });
+
+export const rerunWorkflowRun = createServerFn({ method: "POST" })
+ .inputValidator(identityValidator)
+ .handler(async ({ data }): Promise => {
+ const context = await getGitHubUserContextForRepository(data);
+ if (!context) {
+ return { ok: false, error: "Not authenticated" };
+ }
+
+ try {
+ await context.octokit.rest.actions.reRunWorkflow({
+ owner: data.owner,
+ repo: data.repo,
+ run_id: data.runId,
+ });
+ return { ok: true };
+ } catch (error) {
+ return toMutationError("rerun workflow run", error);
+ }
+ });
+
+export const rerunFailedWorkflowJobs = createServerFn({ method: "POST" })
+ .inputValidator(identityValidator)
+ .handler(async ({ data }): Promise => {
+ const context = await getGitHubUserContextForRepository(data);
+ if (!context) {
+ return { ok: false, error: "Not authenticated" };
+ }
+
+ try {
+ await context.octokit.rest.actions.reRunWorkflowFailedJobs({
+ owner: data.owner,
+ repo: data.repo,
+ run_id: data.runId,
+ });
+ return { ok: true };
+ } catch (error) {
+ return toMutationError("rerun failed workflow jobs", error);
+ }
+ });
+
+export type WorkflowDefinitionInput = {
+ owner: string;
+ repo: string;
+ path: string;
+ ref: string;
+};
+
+function parseWorkflowDefinition(yamlText: string): WorkflowDefinition | null {
+ let parsed: unknown;
+ try {
+ parsed = parseYaml(yamlText);
+ } catch {
+ return null;
+ }
+ if (!parsed || typeof parsed !== "object") return null;
+ const jobsRaw = (parsed as { jobs?: unknown }).jobs;
+ if (!jobsRaw || typeof jobsRaw !== "object") return null;
+
+ const jobs: WorkflowDefinitionJob[] = [];
+ for (const [key, value] of Object.entries(
+ jobsRaw as Record,
+ )) {
+ if (!value || typeof value !== "object") continue;
+ const v = value as {
+ needs?: string | string[];
+ name?: unknown;
+ strategy?: { matrix?: unknown };
+ };
+ const needs = Array.isArray(v.needs)
+ ? v.needs.filter((n): n is string => typeof n === "string")
+ : typeof v.needs === "string"
+ ? [v.needs]
+ : [];
+ const nameTemplate = typeof v.name === "string" ? v.name : null;
+ const isMatrix =
+ !!v.strategy && typeof v.strategy === "object" && "matrix" in v.strategy;
+ jobs.push({ key, needs, nameTemplate, isMatrix });
+ }
+ return { jobs };
+}
+
+export const getWorkflowDefinition = createServerFn({ method: "GET" })
+ .inputValidator(identityValidator)
+ .handler(async ({ data }): Promise => {
+ const context = await getGitHubContextForRepository(data);
+ if (!context) return null;
+
+ try {
+ const response = await context.octokit.rest.repos.getContent({
+ owner: data.owner,
+ repo: data.repo,
+ path: data.path,
+ ref: data.ref,
+ });
+ const payload = response.data;
+ if (
+ !payload ||
+ Array.isArray(payload) ||
+ payload.type !== "file" ||
+ typeof payload.content !== "string"
+ ) {
+ return null;
+ }
+ const encoding = payload.encoding ?? "base64";
+ const yamlText =
+ encoding === "base64"
+ ? Buffer.from(payload.content.replace(/\n/g, ""), "base64").toString(
+ "utf-8",
+ )
+ : payload.content;
+ return parseWorkflowDefinition(yamlText);
+ } catch (error) {
+ if (error instanceof RequestError && error.status === 404) {
+ return null;
+ }
+ console.error("[getWorkflowDefinition]", error);
+ return null;
+ }
+ });
+
+export type WorkflowJobLogsInput = {
+ owner: string;
+ repo: string;
+ jobId: number;
+};
+
+function decodeLogsPayload(data: unknown): string {
+ if (typeof data === "string") return data;
+ if (data instanceof ArrayBuffer) return new TextDecoder().decode(data);
+ if (ArrayBuffer.isView(data))
+ return new TextDecoder().decode(data as ArrayBufferView);
+ return "";
+}
+
+export const getWorkflowJobLogs = createServerFn({ method: "GET" })
+ .inputValidator(identityValidator)
+ .handler(async ({ data }): Promise => {
+ const repoInput = { owner: data.owner, repo: data.repo };
+ const tag = `[getWorkflowJobLogs ${data.owner}/${data.repo}#${data.jobId}]`;
+
+ const [appContext, userContext] = await Promise.all([
+ getGitHubContextForRepository(repoInput),
+ getGitHubUserContextForRepository(repoInput),
+ ]);
+
+ const seen = new Set();
+ const contexts = [
+ { label: "app", ctx: appContext },
+ { label: "user", ctx: userContext },
+ ].filter(
+ (
+ entry,
+ ): entry is { label: string; ctx: NonNullable } => {
+ if (!entry.ctx) return false;
+ if (seen.has(entry.ctx.octokit)) return false;
+ seen.add(entry.ctx.octokit);
+ return true;
+ },
+ );
+
+ console.log(`${tag} contexts`, {
+ appContext: !!appContext,
+ userContext: !!userContext,
+ tiers: contexts.map((c) => c.label),
+ });
+
+ if (contexts.length === 0) {
+ console.warn(`${tag} no usable context`);
+ return null;
+ }
+
+ let lastError: unknown = null;
+ for (const { label, ctx } of contexts) {
+ try {
+ console.log(`${tag} attempting`, label);
+ const response = await withGitHubOperationTimeout(
+ `${tag} ${label}`,
+ GITHUB_OPERATION_TIMEOUT_MS,
+ () =>
+ ctx.octokit.request(
+ "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs",
+ {
+ owner: data.owner,
+ repo: data.repo,
+ job_id: data.jobId,
+ },
+ ),
+ );
+ const logs = decodeLogsPayload(response.data);
+ const dataKind =
+ typeof response.data === "string"
+ ? "string"
+ : response.data instanceof ArrayBuffer
+ ? "ArrayBuffer"
+ : ArrayBuffer.isView(response.data)
+ ? "ArrayBufferView"
+ : typeof response.data;
+ console.log(`${tag} ok via ${label}`, {
+ status: response.status,
+ dataKind,
+ bytes: logs.length,
+ });
+ return {
+ logs,
+ fetchedAt: new Date().toISOString(),
+ notAvailable: false,
+ };
+ } catch (error) {
+ lastError = error;
+ const status = error instanceof RequestError ? error.status : undefined;
+ console.warn(`${tag} ${label} failed`, {
+ status,
+ message: error instanceof Error ? error.message : String(error),
+ });
+ if (status === 404 || status === 410) {
+ return {
+ logs: "",
+ fetchedAt: new Date().toISOString(),
+ notAvailable: true,
+ };
+ }
+ if (status === 401 || status === 403) {
+ continue;
+ }
+ console.error(`${tag} unexpected error`, error);
+ return null;
+ }
+ }
+ console.error(`${tag} all auth tiers failed`, lastError);
+ return null;
+ });
+
+export type WorkflowRunLogsBundleInput = {
+ owner: string;
+ repo: string;
+ runId: number;
+ attempt?: number;
+};
+
+const JOB_NAME_MAX_LENGTH = 90;
+
+/** Mirrors the gh CLI's `getJobNameForLogFilename`: strip path-illegal chars
+ * and truncate to 90 UTF-16 code units (matches the C# server's `string.Length`,
+ * which is also UTF-16 code units; JS strings index by the same units). */
+function sanitizeJobNameForZip(name: string): string {
+ const stripped = name.replace(/[/:]/g, "");
+ const truncated =
+ stripped.length > JOB_NAME_MAX_LENGTH
+ ? stripped.slice(0, JOB_NAME_MAX_LENGTH)
+ : stripped;
+ return truncated.trim();
+}
+
+const STEP_FILE_RE = /^(.+?)\/(\d+)_.*\.txt$/;
+const JOB_FILE_RE = /^(-?\d+)_(.+)\.txt$/;
+
+function decodeZipEntry(bytes: Uint8Array): string {
+ return new TextDecoder("utf-8").decode(bytes);
+}
+
+async function unzipBundle(
+ bytes: Uint8Array,
+): Promise> {
+ const { unzip } = await import("fflate");
+ return new Promise((resolve, reject) => {
+ unzip(
+ bytes,
+ {
+ filter: (file) => file.name.endsWith(".txt"),
+ },
+ (err, result) => {
+ if (err) reject(err);
+ else resolve(result);
+ },
+ );
+ });
+}
+
+function getOrCreateJobEntry(
+ jobs: WorkflowRunLogsBundle["jobs"],
+ jobName: string,
+): WorkflowRunLogsBundle["jobs"][string] {
+ const existing = jobs[jobName];
+ if (existing) return existing;
+ const entry = { jobName, jobLog: null, steps: {} };
+ jobs[jobName] = entry;
+ return entry;
+}
+
+async function parseLogsZip(
+ bytes: Uint8Array,
+): Promise {
+ const files = await unzipBundle(bytes);
+ const jobs: WorkflowRunLogsBundle["jobs"] = {};
+ for (const path in files) {
+ const data = files[path];
+ if (!data) continue;
+ const stepMatch = path.match(STEP_FILE_RE);
+ if (stepMatch) {
+ const jobName = stepMatch[1] ?? "";
+ const stepNumber = Number(stepMatch[2]);
+ if (!Number.isFinite(stepNumber)) continue;
+ const entry = getOrCreateJobEntry(jobs, jobName);
+ entry.steps[stepNumber] = decodeZipEntry(data);
+ continue;
+ }
+ const jobMatch = path.match(JOB_FILE_RE);
+ if (jobMatch) {
+ const jobName = jobMatch[2] ?? "";
+ const entry = getOrCreateJobEntry(jobs, jobName);
+ entry.jobLog = decodeZipEntry(data);
+ }
+ }
+ return jobs;
+}
+
+async function fetchWorkflowRunLogsBundleUncached(
+ data: WorkflowRunLogsBundleInput,
+): Promise {
+ const repoInput = { owner: data.owner, repo: data.repo };
+ const tag = `[getWorkflowRunLogsBundle ${data.owner}/${data.repo}#${data.runId}${data.attempt ? `@${data.attempt}` : ""}]`;
+
+ const [appContext, userContext] = await Promise.all([
+ getGitHubContextForRepository(repoInput),
+ getGitHubUserContextForRepository(repoInput),
+ ]);
+
+ const seen = new Set();
+ const contexts = [
+ { label: "app", ctx: appContext },
+ { label: "user", ctx: userContext },
+ ].filter(
+ (entry): entry is { label: string; ctx: NonNullable } => {
+ if (!entry.ctx) return false;
+ if (seen.has(entry.ctx.octokit)) return false;
+ seen.add(entry.ctx.octokit);
+ return true;
+ },
+ );
+
+ if (contexts.length === 0) {
+ console.warn(`${tag} no usable context`);
+ return null;
+ }
+
+ let lastError: unknown = null;
+ for (const { label, ctx } of contexts) {
+ try {
+ const attempt = data.attempt;
+ const response = await withGitHubOperationTimeout(
+ `${tag} ${label}`,
+ GITHUB_OPERATION_TIMEOUT_MS,
+ () =>
+ attempt
+ ? ctx.octokit.request(
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs",
+ {
+ owner: data.owner,
+ repo: data.repo,
+ run_id: data.runId,
+ attempt_number: attempt,
+ request: { parseSuccessResponseBody: false },
+ },
+ )
+ : ctx.octokit.request(
+ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs",
+ {
+ owner: data.owner,
+ repo: data.repo,
+ run_id: data.runId,
+ request: { parseSuccessResponseBody: false },
+ },
+ ),
+ );
+ const body = response.data as unknown;
+ const buffer =
+ body instanceof ArrayBuffer
+ ? body
+ : ArrayBuffer.isView(body)
+ ? (body as ArrayBufferView).buffer.slice(
+ (body as ArrayBufferView).byteOffset,
+ (body as ArrayBufferView).byteOffset +
+ (body as ArrayBufferView).byteLength,
+ )
+ : body && typeof (body as Response).arrayBuffer === "function"
+ ? await (body as Response).arrayBuffer()
+ : null;
+ if (!buffer) {
+ console.error(`${tag} unsupported response shape`, typeof body);
+ return null;
+ }
+ const bytes = new Uint8Array(buffer);
+ const jobs = await parseLogsZip(bytes);
+ console.log(`${tag} ok via ${label}`, {
+ status: response.status,
+ bytes: bytes.byteLength,
+ jobs: Object.keys(jobs).length,
+ });
+ return {
+ jobs,
+ fetchedAt: new Date().toISOString(),
+ notAvailable: false,
+ };
+ } catch (error) {
+ lastError = error;
+ const status = error instanceof RequestError ? error.status : undefined;
+ console.warn(`${tag} ${label} failed`, {
+ status,
+ message: error instanceof Error ? error.message : String(error),
+ });
+ if (status === 404 || status === 410) {
+ return {
+ jobs: {},
+ fetchedAt: new Date().toISOString(),
+ notAvailable: true,
+ };
+ }
+ if (status === 401 || status === 403) continue;
+ console.error(`${tag} unexpected error`, error);
+ return null;
+ }
+ }
+ console.error(`${tag} all auth tiers failed`, lastError);
+ return null;
+}
+
+export const getWorkflowRunLogsBundle = createServerFn({ method: "GET" })
+ .inputValidator(identityValidator)
+ .handler(async ({ data }): Promise => {
+ const context = await getGitHubContextForRepository(data);
+ if (!context) {
+ return fetchWorkflowRunLogsBundleUncached(data);
+ }
+
+ // Cache the parsed bundle. Once a run+attempt completes its data is
+ // immutable, so we use a long fresh window and rely on the
+ // workflowRunEntity signal (bumped by `workflow_run` and `workflow_job`
+ // webhooks) to invalidate while the run is still in-flight.
+ return getOrRevalidateGitHubResource({
+ userId: context.session.user.id,
+ resource: "actions.run.logsBundle",
+ params: {
+ owner: data.owner,
+ repo: data.repo,
+ runId: data.runId,
+ attempt: data.attempt ?? null,
+ },
+ freshForMs: 60 * 60 * 1000,
+ signalKeys: [
+ githubRevalidationSignalKeys.workflowRunEntity({
+ owner: data.owner,
+ repo: data.repo,
+ runId: data.runId,
+ }),
+ ],
+ fetcher: async () => {
+ const bundle = await fetchWorkflowRunLogsBundleUncached(data);
+ return {
+ kind: "success",
+ data: bundle,
+ metadata: createGitHubResponseMetadata(200, {}),
+ };
+ },
+ });
+ });
+
+/** Build the zip-file job name for a given API job name. Exported for tests/clients. */
+export function workflowZipJobName(jobName: string): string {
+ return sanitizeJobNameForZip(jobName);
+}
diff --git a/apps/dashboard/src/lib/github.query.ts b/apps/dashboard/src/lib/github.query.ts
index 154f9a4..d2dfc08 100644
--- a/apps/dashboard/src/lib/github.query.ts
+++ b/apps/dashboard/src/lib/github.query.ts
@@ -47,8 +47,17 @@ import {
getUserPinnedRepos,
getUserProfile,
getUserRepos,
+ getWorkflowDefinition,
+ getWorkflowJobLogs,
+ getWorkflowRun,
+ getWorkflowRunLogsBundle,
+ getWorkflowRunsForRepo,
+ listWorkflowRunArtifacts,
+ listWorkflowRunJobs,
type RepoTemplateKind,
searchCommandPaletteGitHub,
+ type WorkflowRunListStatus,
+ workflowZipJobName,
} from "./github.functions";
import { githubCachePolicy } from "./github-cache-policy";
import { ensureDefinedQueryData } from "./query-data";
@@ -122,6 +131,44 @@ export type IssueFromRepoQueryInput = {
issueNumber: number;
};
+export type WorkflowRunQueryInput = {
+ owner: string;
+ repo: string;
+ runId: number;
+};
+
+export type WorkflowRunsFromRepoQueryInput = {
+ owner: string;
+ repo: string;
+ page?: number;
+ perPage?: number;
+ status?: WorkflowRunListStatus;
+ event?: string;
+ branch?: string;
+ actor?: string;
+ workflowId?: number;
+};
+
+export type WorkflowDefinitionQueryInput = {
+ owner: string;
+ repo: string;
+ path: string;
+ ref: string;
+};
+
+export type WorkflowJobLogsQueryInput = {
+ owner: string;
+ repo: string;
+ jobId: number;
+};
+
+export type WorkflowRunLogsBundleQueryInput = {
+ owner: string;
+ repo: string;
+ runId: number;
+ attempt?: number;
+};
+
const persistedMeta = {
persist: true,
} as const;
@@ -283,6 +330,47 @@ export const githubQueryKeys = {
input: { all?: boolean; participating?: boolean },
) => ["github", scope.userId, "notifications", "list", input] as const,
},
+ actions: {
+ runsList: (
+ scope: GitHubQueryScope,
+ input: WorkflowRunsFromRepoQueryInput,
+ ) => ["github", scope.userId, "actions", "runsList", input] as const,
+ workflowRun: (scope: GitHubQueryScope, input: WorkflowRunQueryInput) =>
+ ["github", scope.userId, "actions", "workflowRun", input] as const,
+ workflowRunJobs: (scope: GitHubQueryScope, input: WorkflowRunQueryInput) =>
+ ["github", scope.userId, "actions", "workflowRunJobs", input] as const,
+ workflowRunArtifacts: (
+ scope: GitHubQueryScope,
+ input: WorkflowRunQueryInput,
+ ) =>
+ [
+ "github",
+ scope.userId,
+ "actions",
+ "workflowRunArtifacts",
+ input,
+ ] as const,
+ workflowDefinition: (
+ scope: GitHubQueryScope,
+ input: WorkflowDefinitionQueryInput,
+ ) =>
+ ["github", scope.userId, "actions", "workflowDefinition", input] as const,
+ workflowJobLogs: (
+ scope: GitHubQueryScope,
+ input: WorkflowJobLogsQueryInput,
+ ) => ["github", scope.userId, "actions", "workflowJobLogs", input] as const,
+ workflowRunLogsBundle: (
+ scope: GitHubQueryScope,
+ input: WorkflowRunLogsBundleQueryInput,
+ ) =>
+ [
+ "github",
+ scope.userId,
+ "actions",
+ "workflowRunLogsBundle",
+ input,
+ ] as const,
+ },
};
export function githubViewerQueryOptions(scope: GitHubQueryScope) {
@@ -904,3 +992,105 @@ export function githubNotificationsQueryOptions(
meta: persistedMeta,
});
}
+
+export function githubWorkflowRunsFromRepoQueryOptions(
+ scope: GitHubQueryScope,
+ input: WorkflowRunsFromRepoQueryInput,
+) {
+ return queryOptions({
+ queryKey: githubQueryKeys.actions.runsList(scope, input),
+ queryFn: () => getWorkflowRunsForRepo({ data: input }),
+ staleTime: githubCachePolicy.workflowRun.staleTimeMs,
+ gcTime: githubCachePolicy.workflowRun.gcTimeMs,
+ meta: tabPersistedMeta,
+ });
+}
+
+export function githubWorkflowRunQueryOptions(
+ scope: GitHubQueryScope,
+ input: WorkflowRunQueryInput,
+) {
+ return queryOptions({
+ queryKey: githubQueryKeys.actions.workflowRun(scope, input),
+ queryFn: () => getWorkflowRun({ data: input }),
+ staleTime: githubCachePolicy.workflowRun.staleTimeMs,
+ gcTime: githubCachePolicy.workflowRun.gcTimeMs,
+ meta: tabPersistedMeta,
+ });
+}
+
+export function githubWorkflowRunJobsQueryOptions(
+ scope: GitHubQueryScope,
+ input: WorkflowRunQueryInput,
+) {
+ return queryOptions({
+ queryKey: githubQueryKeys.actions.workflowRunJobs(scope, input),
+ queryFn: () => listWorkflowRunJobs({ data: input }),
+ staleTime: githubCachePolicy.workflowRun.staleTimeMs,
+ gcTime: githubCachePolicy.workflowRun.gcTimeMs,
+ meta: tabPersistedMeta,
+ });
+}
+
+export function githubWorkflowRunArtifactsQueryOptions(
+ scope: GitHubQueryScope,
+ input: WorkflowRunQueryInput,
+) {
+ return queryOptions({
+ queryKey: githubQueryKeys.actions.workflowRunArtifacts(scope, input),
+ queryFn: () => listWorkflowRunArtifacts({ data: input }),
+ staleTime: githubCachePolicy.workflowRun.staleTimeMs,
+ gcTime: githubCachePolicy.workflowRun.gcTimeMs,
+ meta: tabPersistedMeta,
+ });
+}
+
+export function githubWorkflowDefinitionQueryOptions(
+ scope: GitHubQueryScope,
+ input: WorkflowDefinitionQueryInput,
+) {
+ return queryOptions({
+ queryKey: githubQueryKeys.actions.workflowDefinition(scope, input),
+ queryFn: () => getWorkflowDefinition({ data: input }),
+ staleTime: githubCachePolicy.detail.staleTimeMs,
+ gcTime: githubCachePolicy.detail.gcTimeMs,
+ meta: tabPersistedMeta,
+ });
+}
+
+export function githubWorkflowJobLogsQueryOptions(
+ scope: GitHubQueryScope,
+ input: WorkflowJobLogsQueryInput,
+) {
+ return queryOptions({
+ queryKey: githubQueryKeys.actions.workflowJobLogs(scope, input),
+ queryFn: () => getWorkflowJobLogs({ data: input }),
+ staleTime: 2 * 1000,
+ gcTime: 60 * 1000,
+ meta: tabPersistedMeta,
+ });
+}
+
+/**
+ * Bundle of per-step logs derived from the run-level zip.
+ *
+ * Only meaningful for completed runs — the `/actions/runs/{runId}/logs`
+ * endpoint returns 404 / 410 (mapped to `notAvailable: true`) until the run
+ * finishes. Once completed, the data is immutable, so we cache aggressively.
+ */
+export function githubWorkflowRunLogsBundleQueryOptions(
+ scope: GitHubQueryScope,
+ input: WorkflowRunLogsBundleQueryInput,
+) {
+ return queryOptions({
+ queryKey: githubQueryKeys.actions.workflowRunLogsBundle(scope, input),
+ queryFn: () => getWorkflowRunLogsBundle({ data: input }),
+ staleTime: 60 * 60 * 1000,
+ gcTime: 4 * 60 * 60 * 1000,
+ meta: tabPersistedMeta,
+ });
+}
+
+/** Re-exported so client code can compute the sanitized job-name lookup key
+ * the same way the server stores entries in the bundle. */
+export { workflowZipJobName };
diff --git a/apps/dashboard/src/lib/github.types.ts b/apps/dashboard/src/lib/github.types.ts
index 826e412..4894171 100644
--- a/apps/dashboard/src/lib/github.types.ts
+++ b/apps/dashboard/src/lib/github.types.ts
@@ -253,6 +253,7 @@ export type PullCheckRun = {
startedAt: string | null;
htmlUrl: string | null;
required: boolean;
+ workflowRunId: number | null;
};
export type PullWorkflowApproval = {
@@ -261,6 +262,103 @@ export type PullWorkflowApproval = {
event: string;
};
+export type WorkflowRunPullRequestRef = {
+ number: number;
+ headRef: string;
+ baseRef: string;
+};
+
+export type WorkflowRun = {
+ id: number;
+ name: string | null;
+ displayTitle: string;
+ status: string;
+ conclusion: string | null;
+ event: string;
+ headBranch: string | null;
+ headSha: string;
+ runNumber: number;
+ runAttempt: number;
+ runStartedAt: string | null;
+ createdAt: string;
+ updatedAt: string;
+ htmlUrl: string;
+ path: string;
+ workflowId: number;
+ actor: GitHubActor | null;
+ triggeringActor: GitHubActor | null;
+ pullRequests: WorkflowRunPullRequestRef[];
+ viewerCanRerun: boolean;
+};
+
+export type WorkflowRunStep = {
+ number: number;
+ name: string;
+ status: string;
+ conclusion: string | null;
+ startedAt: string | null;
+ completedAt: string | null;
+};
+
+export type WorkflowRunJob = {
+ id: number;
+ runId: number;
+ name: string;
+ status: string;
+ conclusion: string | null;
+ startedAt: string | null;
+ completedAt: string | null;
+ htmlUrl: string | null;
+ labels: string[];
+ runnerName: string | null;
+ steps: WorkflowRunStep[];
+};
+
+export type WorkflowRunArtifact = {
+ id: number;
+ name: string;
+ sizeInBytes: number;
+ expired: boolean;
+ createdAt: string | null;
+ expiresAt: string | null;
+ archiveDownloadUrl: string;
+ digest: string | null;
+};
+
+export type WorkflowJobLogs = {
+ logs: string;
+ fetchedAt: string;
+ notAvailable: boolean;
+};
+
+/** Per-step logs derived from the run-level zip (`/actions/runs/{runId}/logs`).
+ * Keys are step numbers as they appear in the API job's `steps[].number`. */
+export type WorkflowJobStepLogs = {
+ /** Sanitized job name as it appears inside the zip (slash/colon stripped, 90-char UTF-16 truncated). */
+ jobName: string;
+ /** Whole-job log file from the top-level `{ordinal}_{jobName}.txt` entry, when present. */
+ jobLog: string | null;
+ /** Per-step files keyed by step number (e.g. `1` → contents of `{jobName}/1_{stepName}.txt`). */
+ steps: Record;
+};
+
+export type WorkflowRunLogsBundle = {
+ jobs: Record;
+ fetchedAt: string;
+ notAvailable: boolean;
+};
+
+export type WorkflowDefinitionJob = {
+ key: string;
+ needs: string[];
+ nameTemplate: string | null;
+ isMatrix: boolean;
+};
+
+export type WorkflowDefinition = {
+ jobs: WorkflowDefinitionJob[];
+};
+
export type PullReview = {
id: number;
state: string;
diff --git a/apps/dashboard/src/lib/tab-store.ts b/apps/dashboard/src/lib/tab-store.ts
index c2f6463..f37795f 100644
--- a/apps/dashboard/src/lib/tab-store.ts
+++ b/apps/dashboard/src/lib/tab-store.ts
@@ -1,6 +1,12 @@
import { useSyncExternalStore } from "react";
-export type TabType = "pull" | "issue" | "review" | "repo" | "commit";
+export type TabType =
+ | "pull"
+ | "issue"
+ | "review"
+ | "repo"
+ | "commit"
+ | "actions";
export interface Tab {
id: string;
@@ -24,6 +30,7 @@ const VALID_TAB_TYPES = {
review: true,
repo: true,
commit: true,
+ actions: true,
} satisfies Record;
function isValidTabType(type: unknown): type is TabType {
diff --git a/apps/dashboard/src/lib/use-now.ts b/apps/dashboard/src/lib/use-now.ts
new file mode 100644
index 0000000..28f71ee
--- /dev/null
+++ b/apps/dashboard/src/lib/use-now.ts
@@ -0,0 +1,36 @@
+import { useSyncExternalStore } from "react";
+
+let now = Date.now();
+const listeners = new Set<() => void>();
+let intervalId: ReturnType | null = null;
+
+function tick() {
+ now = Date.now();
+ for (const listener of listeners) listener();
+}
+
+function subscribe(listener: () => void) {
+ listeners.add(listener);
+ if (intervalId === null) {
+ intervalId = setInterval(tick, 1000);
+ }
+ return () => {
+ listeners.delete(listener);
+ if (listeners.size === 0 && intervalId !== null) {
+ clearInterval(intervalId);
+ intervalId = null;
+ }
+ };
+}
+
+function getSnapshot() {
+ return now;
+}
+
+function getServerSnapshot() {
+ return 0;
+}
+
+export function useNow(): number {
+ return useSyncExternalStore(subscribe, getSnapshot, getServerSnapshot);
+}
diff --git a/apps/dashboard/src/routeTree.gen.ts b/apps/dashboard/src/routeTree.gen.ts
index 98651e0..c44b43a 100644
--- a/apps/dashboard/src/routeTree.gen.ts
+++ b/apps/dashboard/src/routeTree.gen.ts
@@ -32,6 +32,7 @@ import { Route as ApiGithubAppCallbackRouteImport } from './routes/api/github/ap
import { Route as ApiGithubAppAuthorizeRouteImport } from './routes/api/github/app/authorize'
import { Route as ProtectedOwnerRepoPullsRouteImport } from './routes/_protected/$owner/$repo/pulls'
import { Route as ProtectedOwnerRepoIssuesIndexRouteImport } from './routes/_protected/$owner/$repo/issues.index'
+import { Route as ProtectedOwnerRepoActionsIndexRouteImport } from './routes/_protected/$owner/$repo/actions.index'
import { Route as ProtectedOwnerRepoTreeSplatRouteImport } from './routes/_protected/$owner/$repo/tree.$'
import { Route as ProtectedOwnerRepoReviewPullIdRouteImport } from './routes/_protected/$owner/$repo/review.$pullId'
import { Route as ProtectedOwnerRepoPullPullIdRouteImport } from './routes/_protected/$owner/$repo/pull.$pullId'
@@ -40,6 +41,8 @@ import { Route as ProtectedOwnerRepoIssuesIssueIdRouteImport } from './routes/_p
import { Route as ProtectedOwnerRepoCompareSplatRouteImport } from './routes/_protected/$owner/$repo/compare.$'
import { Route as ProtectedOwnerRepoCommitShaRouteImport } from './routes/_protected/$owner/$repo/commit.$sha'
import { Route as ProtectedOwnerRepoBlobSplatRouteImport } from './routes/_protected/$owner/$repo/blob.$'
+import { Route as ProtectedOwnerRepoActionsRunsRunIdRouteImport } from './routes/_protected/$owner/$repo/actions.runs.$runId'
+import { Route as ProtectedOwnerRepoActionsRunsRunIdJobJobIdRouteImport } from './routes/_protected/$owner/$repo/actions.runs.$runId_.job.$jobId'
const TermsRoute = TermsRouteImport.update({
id: '/terms',
@@ -157,6 +160,12 @@ const ProtectedOwnerRepoIssuesIndexRoute =
path: '/$owner/$repo/issues/',
getParentRoute: () => ProtectedRoute,
} as any)
+const ProtectedOwnerRepoActionsIndexRoute =
+ ProtectedOwnerRepoActionsIndexRouteImport.update({
+ id: '/$owner/$repo/actions/',
+ path: '/$owner/$repo/actions/',
+ getParentRoute: () => ProtectedRoute,
+ } as any)
const ProtectedOwnerRepoTreeSplatRoute =
ProtectedOwnerRepoTreeSplatRouteImport.update({
id: '/$owner/$repo/tree/$',
@@ -205,6 +214,18 @@ const ProtectedOwnerRepoBlobSplatRoute =
path: '/$owner/$repo/blob/$',
getParentRoute: () => ProtectedRoute,
} as any)
+const ProtectedOwnerRepoActionsRunsRunIdRoute =
+ ProtectedOwnerRepoActionsRunsRunIdRouteImport.update({
+ id: '/$owner/$repo/actions/runs/$runId',
+ path: '/$owner/$repo/actions/runs/$runId',
+ getParentRoute: () => ProtectedRoute,
+ } as any)
+const ProtectedOwnerRepoActionsRunsRunIdJobJobIdRoute =
+ ProtectedOwnerRepoActionsRunsRunIdJobJobIdRouteImport.update({
+ id: '/$owner/$repo/actions/runs/$runId_/job/$jobId',
+ path: '/$owner/$repo/actions/runs/$runId/job/$jobId',
+ getParentRoute: () => ProtectedRoute,
+ } as any)
export interface FileRoutesByFullPath {
'/$': typeof SplatRoute
@@ -236,7 +257,10 @@ export interface FileRoutesByFullPath {
'/$owner/$repo/pull/$pullId': typeof ProtectedOwnerRepoPullPullIdRoute
'/$owner/$repo/review/$pullId': typeof ProtectedOwnerRepoReviewPullIdRoute
'/$owner/$repo/tree/$': typeof ProtectedOwnerRepoTreeSplatRoute
+ '/$owner/$repo/actions/': typeof ProtectedOwnerRepoActionsIndexRoute
'/$owner/$repo/issues/': typeof ProtectedOwnerRepoIssuesIndexRoute
+ '/$owner/$repo/actions/runs/$runId': typeof ProtectedOwnerRepoActionsRunsRunIdRoute
+ '/$owner/$repo/actions/runs/$runId/job/$jobId': typeof ProtectedOwnerRepoActionsRunsRunIdJobJobIdRoute
}
export interface FileRoutesByTo {
'/$': typeof SplatRoute
@@ -267,7 +291,10 @@ export interface FileRoutesByTo {
'/$owner/$repo/pull/$pullId': typeof ProtectedOwnerRepoPullPullIdRoute
'/$owner/$repo/review/$pullId': typeof ProtectedOwnerRepoReviewPullIdRoute
'/$owner/$repo/tree/$': typeof ProtectedOwnerRepoTreeSplatRoute
+ '/$owner/$repo/actions': typeof ProtectedOwnerRepoActionsIndexRoute
'/$owner/$repo/issues': typeof ProtectedOwnerRepoIssuesIndexRoute
+ '/$owner/$repo/actions/runs/$runId': typeof ProtectedOwnerRepoActionsRunsRunIdRoute
+ '/$owner/$repo/actions/runs/$runId/job/$jobId': typeof ProtectedOwnerRepoActionsRunsRunIdJobJobIdRoute
}
export interface FileRoutesById {
__root__: typeof rootRouteImport
@@ -301,7 +328,10 @@ export interface FileRoutesById {
'/_protected/$owner/$repo/pull/$pullId': typeof ProtectedOwnerRepoPullPullIdRoute
'/_protected/$owner/$repo/review/$pullId': typeof ProtectedOwnerRepoReviewPullIdRoute
'/_protected/$owner/$repo/tree/$': typeof ProtectedOwnerRepoTreeSplatRoute
+ '/_protected/$owner/$repo/actions/': typeof ProtectedOwnerRepoActionsIndexRoute
'/_protected/$owner/$repo/issues/': typeof ProtectedOwnerRepoIssuesIndexRoute
+ '/_protected/$owner/$repo/actions/runs/$runId': typeof ProtectedOwnerRepoActionsRunsRunIdRoute
+ '/_protected/$owner/$repo/actions/runs/$runId_/job/$jobId': typeof ProtectedOwnerRepoActionsRunsRunIdJobJobIdRoute
}
export interface FileRouteTypes {
fileRoutesByFullPath: FileRoutesByFullPath
@@ -335,7 +365,10 @@ export interface FileRouteTypes {
| '/$owner/$repo/pull/$pullId'
| '/$owner/$repo/review/$pullId'
| '/$owner/$repo/tree/$'
+ | '/$owner/$repo/actions/'
| '/$owner/$repo/issues/'
+ | '/$owner/$repo/actions/runs/$runId'
+ | '/$owner/$repo/actions/runs/$runId/job/$jobId'
fileRoutesByTo: FileRoutesByTo
to:
| '/$'
@@ -366,7 +399,10 @@ export interface FileRouteTypes {
| '/$owner/$repo/pull/$pullId'
| '/$owner/$repo/review/$pullId'
| '/$owner/$repo/tree/$'
+ | '/$owner/$repo/actions'
| '/$owner/$repo/issues'
+ | '/$owner/$repo/actions/runs/$runId'
+ | '/$owner/$repo/actions/runs/$runId/job/$jobId'
id:
| '__root__'
| '/$'
@@ -399,7 +435,10 @@ export interface FileRouteTypes {
| '/_protected/$owner/$repo/pull/$pullId'
| '/_protected/$owner/$repo/review/$pullId'
| '/_protected/$owner/$repo/tree/$'
+ | '/_protected/$owner/$repo/actions/'
| '/_protected/$owner/$repo/issues/'
+ | '/_protected/$owner/$repo/actions/runs/$runId'
+ | '/_protected/$owner/$repo/actions/runs/$runId_/job/$jobId'
fileRoutesById: FileRoutesById
}
export interface RootRouteChildren {
@@ -578,6 +617,13 @@ declare module '@tanstack/react-router' {
preLoaderRoute: typeof ProtectedOwnerRepoIssuesIndexRouteImport
parentRoute: typeof ProtectedRoute
}
+ '/_protected/$owner/$repo/actions/': {
+ id: '/_protected/$owner/$repo/actions/'
+ path: '/$owner/$repo/actions'
+ fullPath: '/$owner/$repo/actions/'
+ preLoaderRoute: typeof ProtectedOwnerRepoActionsIndexRouteImport
+ parentRoute: typeof ProtectedRoute
+ }
'/_protected/$owner/$repo/tree/$': {
id: '/_protected/$owner/$repo/tree/$'
path: '/$owner/$repo/tree/$'
@@ -634,6 +680,20 @@ declare module '@tanstack/react-router' {
preLoaderRoute: typeof ProtectedOwnerRepoBlobSplatRouteImport
parentRoute: typeof ProtectedRoute
}
+ '/_protected/$owner/$repo/actions/runs/$runId': {
+ id: '/_protected/$owner/$repo/actions/runs/$runId'
+ path: '/$owner/$repo/actions/runs/$runId'
+ fullPath: '/$owner/$repo/actions/runs/$runId'
+ preLoaderRoute: typeof ProtectedOwnerRepoActionsRunsRunIdRouteImport
+ parentRoute: typeof ProtectedRoute
+ }
+ '/_protected/$owner/$repo/actions/runs/$runId_/job/$jobId': {
+ id: '/_protected/$owner/$repo/actions/runs/$runId_/job/$jobId'
+ path: '/$owner/$repo/actions/runs/$runId/job/$jobId'
+ fullPath: '/$owner/$repo/actions/runs/$runId/job/$jobId'
+ preLoaderRoute: typeof ProtectedOwnerRepoActionsRunsRunIdJobJobIdRouteImport
+ parentRoute: typeof ProtectedRoute
+ }
}
}
@@ -669,7 +729,10 @@ interface ProtectedRouteChildren {
ProtectedOwnerRepoPullPullIdRoute: typeof ProtectedOwnerRepoPullPullIdRoute
ProtectedOwnerRepoReviewPullIdRoute: typeof ProtectedOwnerRepoReviewPullIdRoute
ProtectedOwnerRepoTreeSplatRoute: typeof ProtectedOwnerRepoTreeSplatRoute
+ ProtectedOwnerRepoActionsIndexRoute: typeof ProtectedOwnerRepoActionsIndexRoute
ProtectedOwnerRepoIssuesIndexRoute: typeof ProtectedOwnerRepoIssuesIndexRoute
+ ProtectedOwnerRepoActionsRunsRunIdRoute: typeof ProtectedOwnerRepoActionsRunsRunIdRoute
+ ProtectedOwnerRepoActionsRunsRunIdJobJobIdRoute: typeof ProtectedOwnerRepoActionsRunsRunIdJobJobIdRoute
}
const ProtectedRouteChildren: ProtectedRouteChildren = {
@@ -691,7 +754,12 @@ const ProtectedRouteChildren: ProtectedRouteChildren = {
ProtectedOwnerRepoPullPullIdRoute: ProtectedOwnerRepoPullPullIdRoute,
ProtectedOwnerRepoReviewPullIdRoute: ProtectedOwnerRepoReviewPullIdRoute,
ProtectedOwnerRepoTreeSplatRoute: ProtectedOwnerRepoTreeSplatRoute,
+ ProtectedOwnerRepoActionsIndexRoute: ProtectedOwnerRepoActionsIndexRoute,
ProtectedOwnerRepoIssuesIndexRoute: ProtectedOwnerRepoIssuesIndexRoute,
+ ProtectedOwnerRepoActionsRunsRunIdRoute:
+ ProtectedOwnerRepoActionsRunsRunIdRoute,
+ ProtectedOwnerRepoActionsRunsRunIdJobJobIdRoute:
+ ProtectedOwnerRepoActionsRunsRunIdJobJobIdRoute,
}
const ProtectedRouteWithChildren = ProtectedRoute._addFileChildren(
diff --git a/apps/dashboard/src/routes/_protected/$owner/$repo/actions.index.tsx b/apps/dashboard/src/routes/_protected/$owner/$repo/actions.index.tsx
new file mode 100644
index 0000000..e05b800
--- /dev/null
+++ b/apps/dashboard/src/routes/_protected/$owner/$repo/actions.index.tsx
@@ -0,0 +1,257 @@
+import { keepPreviousData, useQuery } from "@tanstack/react-query";
+import { createFileRoute, Link } from "@tanstack/react-router";
+import { useQueryStates } from "nuqs";
+import { useMemo } from "react";
+import {
+ applyRepoFilters,
+ deriveWorkflowRunApiStatus,
+ type FilterableItem,
+ FilterBar,
+ getFilterValues,
+ makeBranchFilterDef,
+ parseFilterString,
+ repoListUrlParsers,
+ repoWorkflowRunFilterDefs,
+ useRepoListFilters,
+ workflowRunSortOptions,
+} from "#/components/filters";
+import { DashboardContentLoading } from "#/components/layouts/dashboard-content-loading";
+import { SidePanelPortal } from "#/components/layouts/dashboard-side-panel";
+import { Pagination } from "#/components/pagination";
+import { RepoActivityCards } from "#/components/repo/repo-activity-cards";
+import { WorkflowRunRow } from "#/components/workflows/workflow-run-row";
+import {
+ githubQueryKeys,
+ githubRepoBranchesQueryOptions,
+ githubRepoOverviewQueryOptions,
+ githubViewerQueryOptions,
+ githubWorkflowRunsFromRepoQueryOptions,
+} from "#/lib/github.query";
+import type { WorkflowRun } from "#/lib/github.types";
+import { githubRevalidationSignalKeys } from "#/lib/github-revalidation";
+import { buildSeo, formatPageTitle } from "#/lib/seo";
+import { useGitHubSignalStream } from "#/lib/use-github-signal-stream";
+import { useHasMounted } from "#/lib/use-has-mounted";
+
+const PER_PAGE = 30;
+
+export const Route = createFileRoute("/_protected/$owner/$repo/actions/")({
+ ssr: false,
+ loader: ({ context, params }) => {
+ const scope = { userId: context.user.id };
+ // Prefetch first-page runs (no filters) and supporting data so the
+ // list paints from the server-side cache instantly. The component
+ // will refetch with URL-derived filters if those differ.
+ void context.queryClient.prefetchQuery(
+ githubWorkflowRunsFromRepoQueryOptions(scope, {
+ owner: params.owner,
+ repo: params.repo,
+ page: 1,
+ perPage: 30,
+ }),
+ );
+ void context.queryClient.prefetchQuery(
+ githubRepoOverviewQueryOptions(scope, {
+ owner: params.owner,
+ repo: params.repo,
+ }),
+ );
+ void context.queryClient.prefetchQuery(
+ githubRepoBranchesQueryOptions(scope, {
+ owner: params.owner,
+ repo: params.repo,
+ }),
+ );
+ void context.queryClient.prefetchQuery(githubViewerQueryOptions(scope));
+ },
+ head: ({ match, params }) =>
+ buildSeo({
+ path: match.pathname,
+ title: formatPageTitle(`Actions · ${params.owner}/${params.repo}`),
+ description: `Workflow runs for ${params.owner}/${params.repo}.`,
+ robots: "noindex",
+ }),
+ component: RepoActionsPage,
+});
+
+type RunFilterable = FilterableItem & {
+ status: string;
+ conclusion: string | null;
+ event: string;
+ headBranch: string | null;
+ run: WorkflowRun;
+};
+
+function toFilterable(run: WorkflowRun, ownerRepo: string): RunFilterable {
+ return {
+ id: run.id,
+ title: run.displayTitle,
+ updatedAt: run.updatedAt,
+ createdAt: run.createdAt,
+ comments: 0,
+ author: run.actor
+ ? { login: run.actor.login, avatarUrl: run.actor.avatarUrl }
+ : null,
+ repository: { fullName: ownerRepo },
+ state: run.status,
+ status: run.status,
+ conclusion: run.conclusion,
+ event: run.event,
+ headBranch: run.headBranch,
+ run,
+ };
+}
+
+function RepoActionsPage() {
+ const { user } = Route.useRouteContext();
+ const { owner, repo } = Route.useParams();
+ const scope = useMemo(() => ({ userId: user.id }), [user.id]);
+ const hasMounted = useHasMounted();
+ const ownerRepo = `${owner}/${repo}`;
+
+ const [urlParams] = useQueryStates(repoListUrlParsers);
+ const urlFilters = useMemo(
+ () => parseFilterString(urlParams.filters),
+ [urlParams.filters],
+ );
+ const statusValues = getFilterValues(urlFilters, "status");
+ const apiStatus = deriveWorkflowRunApiStatus(statusValues);
+ const eventValues = getFilterValues(urlFilters, "event");
+ const apiEvent = eventValues.size === 1 ? [...eventValues][0] : undefined;
+ const authorValues = getFilterValues(urlFilters, "author");
+ const apiActor = authorValues.size === 1 ? [...authorValues][0] : undefined;
+ const branchValues = getFilterValues(urlFilters, "branch");
+ const apiBranch = branchValues.size === 1 ? [...branchValues][0] : undefined;
+
+ const overviewQuery = useQuery({
+ ...githubRepoOverviewQueryOptions(scope, { owner, repo }),
+ enabled: hasMounted,
+ });
+
+ const branchesQuery = useQuery({
+ ...githubRepoBranchesQueryOptions(scope, { owner, repo }),
+ enabled: hasMounted,
+ });
+
+ const runsQueryInput = useMemo(
+ () => ({
+ owner,
+ repo,
+ page: urlParams.page,
+ perPage: PER_PAGE,
+ status: apiStatus,
+ event: apiEvent,
+ actor: apiActor,
+ branch: apiBranch,
+ }),
+ [owner, repo, urlParams.page, apiStatus, apiEvent, apiActor, apiBranch],
+ );
+
+ const query = useQuery({
+ ...githubWorkflowRunsFromRepoQueryOptions(scope, runsQueryInput),
+ enabled: hasMounted,
+ placeholderData: keepPreviousData,
+ });
+
+ const webhookTargets = useMemo(
+ () => [
+ {
+ queryKey: githubQueryKeys.actions.runsList(scope, runsQueryInput),
+ signalKeys: [githubRevalidationSignalKeys.actionsRepo({ owner, repo })],
+ },
+ ],
+ [scope, runsQueryInput, owner, repo],
+ );
+ useGitHubSignalStream(webhookTargets);
+
+ const runs = useMemo(() => query.data ?? [], [query.data]);
+ const filterableRuns = useMemo(
+ () => runs.map((r) => toFilterable(r, ownerRepo)),
+ [runs, ownerRepo],
+ );
+ const hasNextPage = runs.length === PER_PAGE;
+
+ const filterDefs = useMemo(() => {
+ const branchNames = branchesQuery.data?.map((b) => b.name) ?? [];
+ return [...repoWorkflowRunFilterDefs, makeBranchFilterDef(branchNames)];
+ }, [branchesQuery.data]);
+
+ const filterState = useRepoListFilters({
+ filterDefs,
+ sortOptions: workflowRunSortOptions,
+ defaultSortId: "updated",
+ items: filterableRuns,
+ });
+
+ const filtered = useMemo(
+ () => applyRepoFilters(filterableRuns, filterState),
+ [filterableRuns, filterState],
+ );
+
+ const repoData = overviewQuery.data;
+
+ return (
+ <>
+
+
+
+
Actions
+
+
+ {owner}/{repo}
+
+ · Workflow runs
+
+
+
+
+
+ {query.isLoading ? (
+
+
+
+ ) : (
+
+ {filtered.length === 0 && (
+
+ No workflow runs found.
+
+ )}
+ {filtered.map((item) => (
+
+
+
+ ))}
+
+ )}
+
+
+
+
+ {repoData && (
+
+
+
+ )}
+ >
+ );
+}
diff --git a/apps/dashboard/src/routes/_protected/$owner/$repo/actions.runs.$runId.tsx b/apps/dashboard/src/routes/_protected/$owner/$repo/actions.runs.$runId.tsx
new file mode 100644
index 0000000..b4139b2
--- /dev/null
+++ b/apps/dashboard/src/routes/_protected/$owner/$repo/actions.runs.$runId.tsx
@@ -0,0 +1,76 @@
+import { createFileRoute } from "@tanstack/react-router";
+import { WorkflowRunPage } from "#/components/workflows/workflow-run-page";
+import {
+ githubViewerQueryOptions,
+ githubWorkflowDefinitionQueryOptions,
+ githubWorkflowRunArtifactsQueryOptions,
+ githubWorkflowRunJobsQueryOptions,
+ githubWorkflowRunQueryOptions,
+} from "#/lib/github.query";
+import { buildSeo, formatPageTitle } from "#/lib/seo";
+
+type WorkflowRunSearch = {
+ pr?: number;
+};
+
+export const Route = createFileRoute(
+ "/_protected/$owner/$repo/actions/runs/$runId",
+)({
+ ssr: false,
+ validateSearch: (search: Record): WorkflowRunSearch => {
+ const raw = search.pr;
+ const parsed =
+ typeof raw === "number"
+ ? raw
+ : typeof raw === "string"
+ ? Number(raw)
+ : Number.NaN;
+ return Number.isInteger(parsed) && parsed > 0 ? { pr: parsed } : {};
+ },
+ loader: ({ context, params }) => {
+ const runId = Number(params.runId);
+ if (!Number.isInteger(runId) || runId <= 0) {
+ return { runTitle: null };
+ }
+ const scope = { userId: context.user.id };
+ const input = { owner: params.owner, repo: params.repo, runId };
+
+ const runOptions = githubWorkflowRunQueryOptions(scope, input);
+ void context.queryClient
+ .ensureQueryData(runOptions)
+ .then((run) => {
+ if (!run) return;
+ void context.queryClient.prefetchQuery(
+ githubWorkflowDefinitionQueryOptions(scope, {
+ owner: params.owner,
+ repo: params.repo,
+ path: run.path,
+ ref: run.headSha,
+ }),
+ );
+ })
+ .catch(() => {});
+ void context.queryClient.prefetchQuery(
+ githubWorkflowRunJobsQueryOptions(scope, input),
+ );
+ void context.queryClient.prefetchQuery(
+ githubWorkflowRunArtifactsQueryOptions(scope, input),
+ );
+ void context.queryClient.prefetchQuery(githubViewerQueryOptions(scope));
+
+ const cached = context.queryClient.getQueryData(runOptions.queryKey);
+ return {
+ runTitle: cached?.displayTitle ?? null,
+ };
+ },
+ head: ({ match, params }) =>
+ buildSeo({
+ path: match.pathname,
+ title: formatPageTitle(
+ match.loaderData?.runTitle ?? `Workflow run #${params.runId}`,
+ ),
+ description: `Workflow run #${params.runId} in ${params.owner}/${params.repo}.`,
+ robots: "noindex",
+ }),
+ component: WorkflowRunPage,
+});
diff --git a/apps/dashboard/src/routes/_protected/$owner/$repo/actions.runs.$runId_.job.$jobId.tsx b/apps/dashboard/src/routes/_protected/$owner/$repo/actions.runs.$runId_.job.$jobId.tsx
new file mode 100644
index 0000000..f946b66
--- /dev/null
+++ b/apps/dashboard/src/routes/_protected/$owner/$repo/actions.runs.$runId_.job.$jobId.tsx
@@ -0,0 +1,78 @@
+import { createFileRoute } from "@tanstack/react-router";
+import { WorkflowJobPage } from "#/components/workflows/workflow-job-page";
+import {
+  githubViewerQueryOptions,
+  githubWorkflowJobLogsQueryOptions,
+  githubWorkflowRunJobsQueryOptions,
+  githubWorkflowRunLogsBundleQueryOptions,
+  githubWorkflowRunQueryOptions,
+} from "#/lib/github.query";
+import { buildSeo, formatPageTitle } from "#/lib/seo";
+
+export const Route = createFileRoute(
+  "/_protected/$owner/$repo/actions/runs/$runId_/job/$jobId",
+)({
+  ssr: false,
+  // Prefetch run/job data so the job page paints from cache; returns the
+  // cached job name (when available) for the document title.
+  loader: ({ context, params }) => {
+    const runId = Number(params.runId);
+    const jobId = Number(params.jobId);
+    // Validate URL params before they reach query keys, mirroring the
+    // guard in the sibling actions.runs.$runId route.
+    if (
+      !Number.isInteger(runId) ||
+      runId <= 0 ||
+      !Number.isInteger(jobId) ||
+      jobId <= 0
+    ) {
+      return { jobName: null };
+    }
+    const scope = { userId: context.user.id };
+    const runInput = { owner: params.owner, repo: params.repo, runId };
+
+    const runOptions = githubWorkflowRunQueryOptions(scope, runInput);
+    void context.queryClient.prefetchQuery(runOptions);
+    void context.queryClient.prefetchQuery(
+      githubWorkflowRunJobsQueryOptions(scope, runInput),
+    );
+    void context.queryClient.prefetchQuery(githubViewerQueryOptions(scope));
+
+    const cachedRun = context.queryClient.getQueryData(runOptions.queryKey);
+    if (cachedRun?.status === "completed") {
+      // Completed runs: logs come from the archived run-logs bundle.
+      void context.queryClient.prefetchQuery(
+        githubWorkflowRunLogsBundleQueryOptions(scope, {
+          ...runInput,
+          attempt: cachedRun.runAttempt,
+        }),
+      );
+    } else {
+      // In-progress (or not-yet-cached) runs: fetch per-job logs instead.
+      void context.queryClient.prefetchQuery(
+        githubWorkflowJobLogsQueryOptions(scope, {
+          owner: params.owner,
+          repo: params.repo,
+          jobId,
+        }),
+      );
+    }
+
+    const jobsKey = githubWorkflowRunJobsQueryOptions(scope, runInput).queryKey;
+    const cachedJobs = context.queryClient.getQueryData(jobsKey);
+    const cachedJob = cachedJobs?.find((j) => j.id === jobId) ?? null;
+    return {
+      jobName: cachedJob?.name ?? null,
+    };
+  },
+  head: ({ match, params }) =>
+    buildSeo({
+      path: match.pathname,
+      title: formatPageTitle(
+        match.loaderData?.jobName ?? `Job #${params.jobId}`,
+      ),
+      description: `Workflow job #${params.jobId} in ${params.owner}/${params.repo}.`,
+      robots: "noindex",
+    }),
+  component: WorkflowJobPage,
+});
diff --git a/packages/icons/src/actions-icon.tsx b/packages/icons/src/actions-icon.tsx
new file mode 100644
index 0000000..5bd738b
--- /dev/null
+++ b/packages/icons/src/actions-icon.tsx
@@ -0,0 +1,35 @@
+import type { SVGProps } from "react";
+
+export function ActionsIcon(
+ props: SVGProps & {
+ size?: number | string;
+ strokeWidth?: number | string;
+ }
+) {
+ const { size = 24, width, height, strokeWidth = 1.5, ...rest } = props;
+ const isDecorative =
+ rest["aria-label"] == null && rest["aria-labelledby"] == null;
+ return (
+ // biome-ignore lint/a11y/noSvgWithoutTitle: consumer provides aria-label/aria-labelledby; otherwise marked aria-hidden (decorative)
+
+ );
+}
diff --git a/packages/icons/src/full-screen-icon.tsx b/packages/icons/src/full-screen-icon.tsx
new file mode 100644
index 0000000..d1913c6
--- /dev/null
+++ b/packages/icons/src/full-screen-icon.tsx
@@ -0,0 +1,30 @@
+import type { SVGProps } from "react";
+
+export function FullScreenIcon(
+ props: SVGProps & { size?: number }
+) {
+ const { size = 24, width, height, ...rest } = props;
+ const isDecorative =
+ rest["aria-label"] == null && rest["aria-labelledby"] == null;
+ return (
+ // biome-ignore lint/a11y/noSvgWithoutTitle: consumer provides aria-label/aria-labelledby; otherwise marked aria-hidden (decorative)
+
+ );
+}
diff --git a/packages/icons/src/index.ts b/packages/icons/src/index.ts
index 036fb07..20d184c 100644
--- a/packages/icons/src/index.ts
+++ b/packages/icons/src/index.ts
@@ -7,6 +7,7 @@ export {
ArchiveIcon,
ArrangeIcon as SortIcon,
ArrowDown01Icon as ChevronDownIcon,
+ ArrowExpand02Icon as ExpandIcon,
ArrowLeft01Icon as ChevronLeftIcon,
ArrowMoveDownRightIcon,
ArrowReloadHorizontalIcon as RefreshCwIcon,
@@ -19,6 +20,8 @@ export {
Cancel01Icon as CloseIcon,
Cancel01Icon as XIcon,
CancelCircleIcon as IssueClosedNotPlannedIcon,
+ CellsIcon,
+ CenterFocusIcon,
CheckListIcon as ReviewsIcon,
CheckmarkCircle01Icon as IssueClosedCompletedIcon,
CircleIcon,
@@ -53,9 +56,11 @@ export {
Logout01Icon as LogOutIcon,
Mail01Icon as MailIcon,
Message01Icon as MessageIcon,
+ MinusSignIcon,
Moon02Icon as MoonIcon,
MoreHorizontalIcon,
Notification01Icon as NotificationIcon,
+ PackageIcon,
PencilEdit01Icon as EditIcon,
PlusSignIcon,
Remove01Icon,
@@ -73,8 +78,10 @@ export {
ViewIcon,
WifiDisconnected01Icon as WifiOffIcon,
} from "@hugeicons/react";
+export { ActionsIcon } from "./actions-icon";
export { ArchiveDownIcon } from "./archive-down-icon";
export { GitHubLogo, GitHubWordmarkLogo, XLogo } from "./brand-logos";
+export { FullScreenIcon } from "./full-screen-icon";
export { PenIcon } from "./pen-icon";
export { SeparatorHorizontalIcon } from "./separator-horizontal-icon";
export { StarIcon } from "./star-icon";
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index d6cf06e..58dd2a3 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -80,6 +80,9 @@ importers:
'@tanstack/router-plugin':
specifier: ~1.167.12
version: 1.167.12(@tanstack/react-router@1.168.13(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(vite@7.3.2(@types/node@22.19.17)(jiti@2.6.1)(lightningcss@1.32.0)(tsx@4.21.0)(yaml@2.8.3))
+ '@xyflow/react':
+ specifier: ^12.10.2
+ version: 12.10.2(@types/react@19.2.14)(immer@11.1.4)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
agentation:
specifier: ^3.0.2
version: 3.0.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
@@ -89,6 +92,9 @@ importers:
drizzle-orm:
specifier: ^0.45.2
version: 0.45.2(@cloudflare/workers-types@4.20260413.1)(@opentelemetry/api@1.9.1)(kysely@0.28.15)
+ fflate:
+ specifier: ^0.8.2
+ version: 0.8.2
motion:
specifier: ^12.38.0
version: 12.38.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
@@ -116,6 +122,9 @@ importers:
tailwindcss:
specifier: ^4.1.18
version: 4.2.2
+ yaml:
+ specifier: ^2.8.3
+ version: 2.8.3
devDependencies:
'@biomejs/biome':
specifier: 2.4.5
@@ -2994,6 +3003,9 @@ packages:
'@types/d3-color@3.1.3':
resolution: {integrity: sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==}
+ '@types/d3-drag@3.0.7':
+ resolution: {integrity: sha512-HE3jVKlzU9AaMazNufooRJ5ZpWmLIoc90A37WU2JMmeq28w1FQqCZswHZ3xR+SuxYftzHq6WU6KJHvqxKzTxxQ==}
+
'@types/d3-ease@3.0.2':
resolution: {integrity: sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==}
@@ -3006,6 +3018,9 @@ packages:
'@types/d3-scale@4.0.9':
resolution: {integrity: sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw==}
+ '@types/d3-selection@3.0.11':
+ resolution: {integrity: sha512-bhAXu23DJWsrI45xafYpkQ4NtcKMwWnAC/vKrd2l+nxMFuvOT3XMYTIj2opv8vq8AO5Yh7Qac/nSeP/3zjTK0w==}
+
'@types/d3-shape@3.1.8':
resolution: {integrity: sha512-lae0iWfcDeR7qt7rA88BNiqdvPS5pFVPpo5OfjElwNaT2yyekbM0C9vK+yqBqEmHr6lDkRnYNoTBYlAgJa7a4w==}
@@ -3015,6 +3030,12 @@ packages:
'@types/d3-timer@3.0.2':
resolution: {integrity: sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==}
+ '@types/d3-transition@3.0.9':
+ resolution: {integrity: sha512-uZS5shfxzO3rGlu0cC3bjmMFKsXv+SmZZcgp0KD22ts4uGXp5EVYGzu/0YdwZeKmddhcAccYtREJKkPfXkZuCg==}
+
+ '@types/d3-zoom@3.0.8':
+ resolution: {integrity: sha512-iqMC4/YlFCSlO8+2Ii1GGGliCAY4XdeG748w5vQUbevlbDu0zSjH/+jojorQVBK/se0j6DUFNPBGSqD3YWYnDw==}
+
'@types/debug@4.1.13':
resolution: {integrity: sha512-KSVgmQmzMwPlmtljOomayoR89W4FynCAi3E8PPs7vmDVPe84hT+vGPKkJfThkmXs0x0jAaa9U8uW8bbfyS2fWw==}
@@ -3097,6 +3118,15 @@ packages:
'@vitest/utils@3.2.4':
resolution: {integrity: sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==}
+ '@xyflow/react@12.10.2':
+ resolution: {integrity: sha512-CgIi6HwlcHXwlkTpr0fxLv/0sRVNZ8IdwKLzzeCscaYBwpvfcH1QFOCeaTCuEn1FQEs/B8CjnTSjhs8udgmBgQ==}
+ peerDependencies:
+ react: '>=17'
+ react-dom: '>=17'
+
+ '@xyflow/system@0.0.76':
+ resolution: {integrity: sha512-hvwvnRS1B3REwVDlWexsq7YQaPZeG3/mKo1jv38UmnpWmxihp14bW6VtEOuHEwJX2FvzFw8k77LyKSk/wiZVNA==}
+
acorn@8.16.0:
resolution: {integrity: sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==}
engines: {node: '>=0.4.0'}
@@ -3343,6 +3373,9 @@ packages:
class-variance-authority@0.7.1:
resolution: {integrity: sha512-Ka+9Trutv7G8M6WT6SeiRWz792K5qEqIGEGzXKhAE6xOWAY6pPH8U+9IY3oCMv6kqTmLsv7Xh/2w2RigkePMsg==}
+ classcat@5.0.5:
+ resolution: {integrity: sha512-JhZUT7JFcQy/EzW605k/ktHtncoo9vnyW/2GspNYwFlN1C/WmjuV/xtS04e9SOkL2sTdw0VAZ2UGCcQ9lR6p6w==}
+
cli-cursor@5.0.0:
resolution: {integrity: sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==}
engines: {node: '>=18'}
@@ -3416,6 +3449,14 @@ packages:
resolution: {integrity: sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==}
engines: {node: '>=12'}
+ d3-dispatch@3.0.1:
+ resolution: {integrity: sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==}
+ engines: {node: '>=12'}
+
+ d3-drag@3.0.0:
+ resolution: {integrity: sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==}
+ engines: {node: '>=12'}
+
d3-ease@3.0.1:
resolution: {integrity: sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==}
engines: {node: '>=12'}
@@ -3436,6 +3477,10 @@ packages:
resolution: {integrity: sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==}
engines: {node: '>=12'}
+ d3-selection@3.0.0:
+ resolution: {integrity: sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==}
+ engines: {node: '>=12'}
+
d3-shape@3.2.0:
resolution: {integrity: sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==}
engines: {node: '>=12'}
@@ -3452,6 +3497,16 @@ packages:
resolution: {integrity: sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==}
engines: {node: '>=12'}
+ d3-transition@3.0.1:
+ resolution: {integrity: sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==}
+ engines: {node: '>=12'}
+ peerDependencies:
+ d3-selection: 2 - 3
+
+ d3-zoom@3.0.0:
+ resolution: {integrity: sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==}
+ engines: {node: '>=12'}
+
data-urls@7.0.0:
resolution: {integrity: sha512-23XHcCF+coGYevirZceTVD7NdJOqVn+49IHyxgszm+JIiHLoB2TkmPtsYkNWT1pvRSGkc35L6NHs0yHkN2SumA==}
engines: {node: ^20.19.0 || ^22.12.0 || >=24.0.0}
@@ -3742,6 +3797,9 @@ packages:
picomatch:
optional: true
+ fflate@0.8.2:
+ resolution: {integrity: sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==}
+
file-selector@2.1.2:
resolution: {integrity: sha512-QgXo+mXTe8ljeqUFaX3QVHc5osSItJ/Km+xpocx0aSqWGMSCf6qYs/VnzZgS864Pjn5iceMRFigeAV7AfTlaig==}
engines: {node: '>= 12'}
@@ -5157,6 +5215,21 @@ packages:
zod@4.3.6:
resolution: {integrity: sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==}
+ zustand@4.5.7:
+ resolution: {integrity: sha512-CHOUy7mu3lbD6o6LJLfllpjkzhHXSBlX8B9+qPddUsIfeF5S/UZ5q0kmCsnRqT1UHFQZchNFDDzMbQsuesHWlw==}
+ engines: {node: '>=12.7.0'}
+ peerDependencies:
+ '@types/react': '>=16.8'
+ immer: '>=9.0.6'
+ react: '>=16.8'
+ peerDependenciesMeta:
+ '@types/react':
+ optional: true
+ immer:
+ optional: true
+ react:
+ optional: true
+
zwitch@2.0.4:
resolution: {integrity: sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==}
@@ -7517,6 +7590,10 @@ snapshots:
'@types/d3-color@3.1.3': {}
+ '@types/d3-drag@3.0.7':
+ dependencies:
+ '@types/d3-selection': 3.0.11
+
'@types/d3-ease@3.0.2': {}
'@types/d3-interpolate@3.0.4':
@@ -7529,6 +7606,8 @@ snapshots:
dependencies:
'@types/d3-time': 3.0.4
+ '@types/d3-selection@3.0.11': {}
+
'@types/d3-shape@3.1.8':
dependencies:
'@types/d3-path': 3.1.1
@@ -7537,6 +7616,15 @@ snapshots:
'@types/d3-timer@3.0.2': {}
+ '@types/d3-transition@3.0.9':
+ dependencies:
+ '@types/d3-selection': 3.0.11
+
+ '@types/d3-zoom@3.0.8':
+ dependencies:
+ '@types/d3-interpolate': 3.0.4
+ '@types/d3-selection': 3.0.11
+
'@types/debug@4.1.13':
dependencies:
'@types/ms': 2.1.0
@@ -7637,6 +7725,29 @@ snapshots:
loupe: 3.2.1
tinyrainbow: 2.0.0
+ '@xyflow/react@12.10.2(@types/react@19.2.14)(immer@11.1.4)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)':
+ dependencies:
+ '@xyflow/system': 0.0.76
+ classcat: 5.0.5
+ react: 19.2.4
+ react-dom: 19.2.4(react@19.2.4)
+ zustand: 4.5.7(@types/react@19.2.14)(immer@11.1.4)(react@19.2.4)
+ transitivePeerDependencies:
+ - '@types/react'
+ - immer
+
+ '@xyflow/system@0.0.76':
+ dependencies:
+ '@types/d3-drag': 3.0.7
+ '@types/d3-interpolate': 3.0.4
+ '@types/d3-selection': 3.0.11
+ '@types/d3-transition': 3.0.9
+ '@types/d3-zoom': 3.0.8
+ d3-drag: 3.0.0
+ d3-interpolate: 3.0.1
+ d3-selection: 3.0.0
+ d3-zoom: 3.0.0
+
acorn@8.16.0: {}
agent-base@7.1.4: {}
@@ -7841,6 +7952,8 @@ snapshots:
dependencies:
clsx: 2.1.1
+ classcat@5.0.5: {}
+
cli-cursor@5.0.0:
dependencies:
restore-cursor: 5.1.0
@@ -7912,6 +8025,13 @@ snapshots:
d3-color@3.1.0: {}
+ d3-dispatch@3.0.1: {}
+
+ d3-drag@3.0.0:
+ dependencies:
+ d3-dispatch: 3.0.1
+ d3-selection: 3.0.0
+
d3-ease@3.0.1: {}
d3-format@3.1.2: {}
@@ -7930,6 +8050,8 @@ snapshots:
d3-time: 3.1.0
d3-time-format: 4.1.0
+ d3-selection@3.0.0: {}
+
d3-shape@3.2.0:
dependencies:
d3-path: 3.1.0
@@ -7944,6 +8066,23 @@ snapshots:
d3-timer@3.0.1: {}
+ d3-transition@3.0.1(d3-selection@3.0.0):
+ dependencies:
+ d3-color: 3.1.0
+ d3-dispatch: 3.0.1
+ d3-ease: 3.0.1
+ d3-interpolate: 3.0.1
+ d3-selection: 3.0.0
+ d3-timer: 3.0.1
+
+ d3-zoom@3.0.0:
+ dependencies:
+ d3-dispatch: 3.0.1
+ d3-drag: 3.0.0
+ d3-interpolate: 3.0.1
+ d3-selection: 3.0.0
+ d3-transition: 3.0.1(d3-selection@3.0.0)
+
data-urls@7.0.0(@noble/hashes@2.0.1):
dependencies:
whatwg-mimetype: 5.0.0
@@ -8200,6 +8339,8 @@ snapshots:
optionalDependencies:
picomatch: 4.0.4
+ fflate@0.8.2: {}
+
file-selector@2.1.2:
dependencies:
tslib: 2.8.1
@@ -9917,4 +10058,12 @@ snapshots:
zod@4.3.6: {}
+ zustand@4.5.7(@types/react@19.2.14)(immer@11.1.4)(react@19.2.4):
+ dependencies:
+ use-sync-external-store: 1.6.0(react@19.2.4)
+ optionalDependencies:
+ '@types/react': 19.2.14
+ immer: 11.1.4
+ react: 19.2.4
+
zwitch@2.0.4: {}