// useDiffData.ts
import type { StructuredPatchHunk } from 'diff'
import { useEffect, useMemo, useState } from 'react'
import {
fetchGitDiff,
fetchGitDiffHunks,
type GitDiffResult,
type GitDiffStats,
} from '../utils/gitDiff.js'
// Per-file cap on combined added+removed lines; files exceeding it are flagged as truncated.
const MAX_LINES_PER_FILE = 400
/** Per-file summary row derived from git diff stats. */
export type DiffFile = {
  /** File path relative to the repository root. */
  path: string
  /** Number of added lines reported by the diff stats. */
  linesAdded: number
  /** Number of removed lines reported by the diff stats. */
  linesRemoved: number
  /** True when git reports the file as binary (no line hunks). */
  isBinary: boolean
  /** True when the file appears in stats but hunk parsing skipped it. */
  isLargeFile: boolean
  /** True when added+removed exceeds MAX_LINES_PER_FILE. */
  isTruncated: boolean
  // NOTE(review): declared but never populated by useDiffData below — confirm
  // whether another producer sets this, or whether it is dead.
  isNewFile?: boolean
  /** True when the file is not yet tracked by git. */
  isUntracked?: boolean
}
/** Result shape returned by the useDiffData hook. */
export type DiffData = {
  /** Aggregate diff stats, or null until loaded (or on fetch failure). */
  stats: GitDiffStats | null
  /** One entry per changed file, sorted by path. */
  files: DiffFile[]
  /** Parsed hunks keyed by file path; empty for binary/large/untracked files. */
  hunks: Map<string, StructuredPatchHunk[]>
  /** True until the initial fetch settles (success or failure). */
  loading: boolean
}
/**
 * Hook that loads the current git diff (stats + parsed hunks) once, on mount,
 * and exposes it as a memoized, path-sorted file list.
 */
export function useDiffData(): DiffData {
  const [diffResult, setDiffResult] = useState<GitDiffResult | null>(null)
  const [hunks, setHunks] = useState<Map<string, StructuredPatchHunk[]>>(
    new Map(),
  )
  const [loading, setLoading] = useState(true)

  // Single fetch on mount; the `disposed` flag prevents setState after unmount.
  useEffect(() => {
    let disposed = false

    const load = async (): Promise<void> => {
      let nextResult: GitDiffResult | null = null
      let nextHunks = new Map<string, StructuredPatchHunk[]>()
      try {
        // Stats and hunks are independent requests, so run them in parallel.
        const [statsResult, hunksResult] = await Promise.all([
          fetchGitDiff(),
          fetchGitDiffHunks(),
        ])
        nextResult = statsResult
        nextHunks = hunksResult
      } catch (_error) {
        // On failure, commit the empty defaults declared above.
      }
      if (!disposed) {
        setDiffResult(nextResult)
        setHunks(nextHunks)
        setLoading(false)
      }
    }

    void load()
    return () => {
      disposed = true
    }
  }, [])

  return useMemo(() => {
    if (!diffResult) {
      return { stats: null, files: [], hunks: new Map(), loading }
    }

    const { stats, perFileStats } = diffResult

    // perFileStats is the authoritative file list — it includes binary and
    // large/skipped files that never made it into the hunk map.
    const files: DiffFile[] = [...perFileStats].map(([path, fileStats]) => {
      const isUntracked = fileStats.isUntracked ?? false
      // A non-binary, tracked file with stats but no hunks was skipped as
      // too large during hunk parsing.
      const isLargeFile = !fileStats.isBinary && !isUntracked && !hunks.get(path)
      const changedLines = fileStats.added + fileStats.removed
      // Over the per-file cap means the rendered hunks were cut short.
      const isTruncated =
        !isLargeFile && !fileStats.isBinary && changedLines > MAX_LINES_PER_FILE
      return {
        path,
        linesAdded: fileStats.added,
        linesRemoved: fileStats.removed,
        isBinary: fileStats.isBinary,
        isLargeFile,
        isTruncated,
        isUntracked,
      }
    })

    files.sort((a, b) => a.path.localeCompare(b.path))
    return { stats, files, hunks, loading: false }
  }, [diffResult, hunks, loading])
}