diff --git a/.env.example b/.env.example index be725bd..a4265e4 100644 --- a/.env.example +++ b/.env.example @@ -75,6 +75,9 @@ SEARCH_INDEX_EXTENSIONS = # 覆盖默认 # 开发者模式 - 控制调试信息显示 DEVELOPER_MODE = false # 是否启用开发者模式 | 默认关闭 +# 前端部署基路径(子路径部署时使用,例如 /repo-viewer/ | 根路径部署留空) +VITE_BASE_PATH = + # 开发者模式启用时提供以下功能: # - 控制台详细日志输出(API请求、文件操作、组件生命周期等) # - 分组调试信息(应用初始化、API请求流程等) diff --git a/.gitignore b/.gitignore index 4bcc95d..096c3ce 100644 --- a/.gitignore +++ b/.gitignore @@ -19,5 +19,7 @@ coverage .tmp .docfind report -src/generated/ +src/generated/*.generated.ts public/search-index/ +public/initial-content/ +/.serena diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 0000000..0bda682 --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,34 @@ +# Repository Guidelines + +## Project Structure & Module Organization + +`src/` contains the application code. UI lives in `src/components/`, reusable logic in `src/hooks/`, GitHub data access in `src/services/github/`, shared helpers in `src/utils/`, and app-wide state in `src/contexts/` and `src/providers/`. Theme tokens and component styling live under `src/theme/`. Static assets and generated search index files are served from `public/`. Serverless handlers are in `api/`, while build-time generators such as `generateInitialContent.ts` and `generateDocfindIndex.ts` live in `scripts/`. Project docs and screenshots are kept in `docs/`. + +## Build, Test, and Development Commands + +This repo uses Vite+ (`vp`) instead of `npm run` scripts. + +- `vp install` - install dependencies. +- `vp dev` - start the local development server. +- `vp build` - create a production build; also generates initial content and docfind artifacts. +- `vp check` - run the unified validation pipeline before opening a PR. +- `vp test` - run the Vitest suite. +- `vp run generate:index` - rebuild the static search index in `public/search-index/` when index-related code changes. + +Copy `.env.example` to `.env` before local work. 
+ +## Coding Style & Naming Conventions + +Follow `.editorconfig`: UTF-8, LF, spaces, and 2-space indentation. Keep JS/TS/TSX lines near the 100-character limit. Prefer TypeScript, functional React components, and small focused modules. Use `PascalCase` for components (`FilePreviewPage.tsx`), `camelCase` for hooks and utilities (`useRepoSearch.ts`, `hashUtils.ts`), and descriptive folder names grouped by feature. Keep comments brief and only where intent is not obvious. + +## Testing Guidelines + +Vitest is configured in `vite.config.ts` and currently discovers `src/**/*.test.ts` with a Node environment. Place tests next to the code they cover, mirroring the source name, for example `src/utils/sorting/contentSorting.test.ts`. Add tests for new parsing, caching, indexing, or data transformation logic; for UI-heavy changes, include manual verification notes in the PR if automated coverage is not practical. + +## Commit & Pull Request Guidelines + +Recent history uses short release-style subjects such as `2.0.0` and `1.4.1`. For normal contributions, prefer concise imperative commit messages and keep unrelated changes separate. Open PRs against `dev`, not `master`. Include a clear description, link related issues, list verification steps (for example `vp check` and `vp test`), and attach screenshots for visible UI changes. + +## Configuration & Search Index Notes + +Review `.env.example` before changing GitHub API, proxy, or search-index behavior. Search index output under `public/search-index/` is generated content; update it only when the indexing pipeline or indexed branches change. 
diff --git a/api/github.test.ts b/api/github.test.ts new file mode 100644 index 0000000..c64915f --- /dev/null +++ b/api/github.test.ts @@ -0,0 +1,251 @@ +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; + +const { mockedAxiosGet } = vi.hoisted(() => ({ + mockedAxiosGet: vi.fn(), +})); + +vi.mock("axios", () => ({ + default: { + get: mockedAxiosGet, + }, +})); + +interface MockResponseState { + headers: Record<string, string | number>; + jsonBody: unknown; + sentBody: unknown; + statusCode: number; +} + +const originalEnv = { ...process.env }; +const baseEnv = Object.fromEntries( + Object.entries(originalEnv).filter( + ([key]) => !key.startsWith("GITHUB_PAT") && !key.startsWith("VITE_GITHUB_PAT"), + ), +); + +const createMockRes = (): { + res: { + status: (code: number) => unknown; + json: (data: unknown) => unknown; + send: (data: unknown) => unknown; + setHeader: (name: string, value: string | number) => unknown; + }; + state: MockResponseState; +} => { + const state: MockResponseState = { + headers: {}, + jsonBody: null, + sentBody: null, + statusCode: 200, + }; + + const res = { + status(code: number) { + state.statusCode = code; + return res; + }, + json(data: unknown) { + state.jsonBody = data; + return res; + }, + send(data: unknown) { + state.sentBody = data; + return res; + }, + setHeader(name: string, value: string | number) { + state.headers[name] = value; + return res; + }, + }; + + return { res, state }; +}; + +const loadHandler = async (): Promise<(req: unknown, res: unknown) => Promise<void>> => { + vi.resetModules(); + const mod = await import("./github"); + return mod.default as (req: unknown, res: unknown) => Promise<void>; +}; + +describe("api/github handler security hardening", () => { + beforeEach(() => { + mockedAxiosGet.mockReset(); + process.env = { + ...baseEnv, + GITHUB_REPO_OWNER: "test-owner", + GITHUB_REPO_NAME: "test-repo", + GITHUB_REPO_BRANCH: "main", + GITHUB_PAT1: "", + }; + }); + + afterEach(() => { + process.env = { ...originalEnv }; + }); + 
+ it("rejects deprecated getFileContent url parameter", async () => { + const handler = await loadHandler(); + const { res, state } = createMockRes(); + + await handler( + { + query: { + action: "getFileContent", + url: "https://example.com/test.txt", + }, + }, + res, + ); + + expect(state.statusCode).toBe(400); + expect(state.jsonBody).toEqual({ + error: "The url parameter is deprecated. Use path and optional branch instead.", + }); + expect(mockedAxiosGet).not.toHaveBeenCalled(); + }); + + it("rejects getFileContent without path", async () => { + const handler = await loadHandler(); + const { res, state } = createMockRes(); + + await handler( + { + query: { + action: "getFileContent", + }, + }, + res, + ); + + expect(state.statusCode).toBe(400); + expect(state.jsonBody).toEqual({ error: "Missing path parameter" }); + expect(mockedAxiosGet).not.toHaveBeenCalled(); + }); + + it("fetches repo files with composed raw URL and auth header", async () => { + process.env.GITHUB_PAT1 = "secret-token"; + const handler = await loadHandler(); + const { res, state } = createMockRes(); + + mockedAxiosGet.mockResolvedValueOnce({ + data: new Uint8Array([65, 66, 67]).buffer, + headers: { + "content-type": "text/plain; charset=utf-8", + }, + } as never); + + await handler( + { + query: { + action: "getFileContent", + path: "docs/readme.md", + branch: "main", + }, + }, + res, + ); + + expect(mockedAxiosGet).toHaveBeenCalledTimes(1); + const [calledUrl, calledConfig] = mockedAxiosGet.mock.calls[0] ?? 
[]; + expect(calledUrl).toBe( + "https://raw.githubusercontent.com/test-owner/test-repo/main/docs/readme.md", + ); + expect(calledConfig?.maxRedirects).toBe(0); + expect(calledConfig?.headers?.Authorization).toBe("token secret-token"); + expect(state.statusCode).toBe(200); + expect(Buffer.isBuffer(state.sentBody)).toBe(true); + }); + + it("rejects getGitHubAsset non-https url", async () => { + const handler = await loadHandler(); + const { res, state } = createMockRes(); + + await handler( + { + query: { + action: "getGitHubAsset", + url: "http://raw.githubusercontent.com/test-owner/test-repo/main/a.md", + }, + }, + res, + ); + + expect(state.statusCode).toBe(400); + expect(state.jsonBody).toEqual({ error: "Only https protocol is allowed" }); + expect(mockedAxiosGet).not.toHaveBeenCalled(); + }); + + it("rejects getGitHubAsset non-allowlisted host", async () => { + const handler = await loadHandler(); + const { res, state } = createMockRes(); + + await handler( + { + query: { + action: "getGitHubAsset", + url: "https://example.com/assets/a.png", + }, + }, + res, + ); + + expect(state.statusCode).toBe(400); + expect(state.jsonBody).toEqual({ error: "Host is not allowed" }); + expect(mockedAxiosGet).not.toHaveBeenCalled(); + }); + + it("fetches allowlisted GitHub assets without Authorization", async () => { + process.env.GITHUB_PAT1 = "secret-token"; + const handler = await loadHandler(); + const { res, state } = createMockRes(); + + mockedAxiosGet.mockResolvedValueOnce({ + data: new Uint8Array([1, 2, 3]).buffer, + headers: { + "content-type": "image/png", + }, + } as never); + + await handler( + { + query: { + action: "getGitHubAsset", + url: "https://user-images.githubusercontent.com/123/abc.png", + }, + }, + res, + ); + + expect(mockedAxiosGet).toHaveBeenCalledTimes(1); + const [, calledConfig] = mockedAxiosGet.mock.calls[0] ?? 
[]; + expect(calledConfig?.maxRedirects).toBe(0); + expect(calledConfig?.headers?.Authorization).toBeUndefined(); + expect(state.statusCode).toBe(200); + }); + + it("does not follow getGitHubAsset redirects", async () => { + const handler = await loadHandler(); + const { res, state } = createMockRes(); + + mockedAxiosGet.mockRejectedValueOnce({ + response: { + status: 302, + }, + message: "Found", + }); + + await handler( + { + query: { + action: "getGitHubAsset", + url: "https://raw.githubusercontent.com/test-owner/test-repo/main/file.png", + }, + }, + res, + ); + + expect(state.statusCode).toBe(302); + expect(state.jsonBody).toEqual({ error: "Failed to fetch GitHub asset" }); + }); +}); diff --git a/api/github.ts b/api/github.ts index 44de2e1..a195ea6 100644 --- a/api/github.ts +++ b/api/github.ts @@ -15,6 +15,17 @@ const colors = { // 配置常量 const GITHUB_API_BASE = "https://api.github.com"; +const PROXY_REQUEST_TIMEOUT_MS = 15000; +const GITHUB_ASSET_ALLOWED_HOSTS = new Set([ + "api.github.com", + "raw.githubusercontent.com", + "user-images.githubusercontent.com", + "objects.githubusercontent.com", + "avatars.githubusercontent.com", + "camo.githubusercontent.com", + "media.githubusercontent.com", + "github.githubassets.com", +]); const parseBooleanFlag = (value?: string | null): boolean => { if (typeof value !== "string") { @@ -256,6 +267,55 @@ const parsePositiveInt = ( return parsed; }; +const isAllowedGitHubAssetHost = (hostname: string): boolean => + GITHUB_ASSET_ALLOWED_HOSTS.has(hostname.toLowerCase()); + +const getPublicGitHubHeaders = (accept: string): Record<string, string> => ({ + Accept: accept, + "User-Agent": "Repo-Viewer", +}); + +const getResponseHeader = ( + responseHeaders: AxiosResponse["headers"], + name: string, +): string | undefined => { + const normalizedName = name.toLowerCase(); + if (typeof responseHeaders.get === "function") { + const value = responseHeaders.get(normalizedName); + return typeof value === "string" ? 
value : undefined; + } + + const rawHeaders = responseHeaders as Record<string, unknown>; + const direct = rawHeaders[normalizedName]; + if (typeof direct === "string") { + return direct; + } + + const fallback = rawHeaders[name]; + return typeof fallback === "string" ? fallback : undefined; +}; + +const copyFileResponseHeaders = ( + res: VercelResponse, + response: AxiosResponse, +): void => { + const upstreamContentType = getResponseHeader(response.headers, "content-type"); + const upstreamContentLength = getResponseHeader(response.headers, "content-length"); + const upstreamDisposition = getResponseHeader(response.headers, "content-disposition"); + const upstreamCacheControl = getResponseHeader(response.headers, "cache-control"); + + res.setHeader("Content-Type", upstreamContentType ?? "application/octet-stream"); + if (upstreamContentLength !== undefined) { + res.setHeader("Content-Length", upstreamContentLength); + } + if (upstreamDisposition !== undefined) { + res.setHeader("Content-Disposition", upstreamDisposition); + } + if (upstreamCacheControl !== undefined) { + res.setHeader("Cache-Control", upstreamCacheControl); + } +}; + // 构建认证头 function getAuthHeaders(): Record<string, string> { const token = tokenManager.getCurrentToken(); @@ -633,13 +693,40 @@ export default async function handler(req: VercelRequest, res: VercelResponse): // 获取文件内容 if (actionParam === "getFileContent") { - const urlParam = Array.isArray(url) ? (url.length > 0 ? url[0] : undefined) : url; - if (typeof urlParam !== "string" || urlParam.trim().length === 0) { - res.status(400).json({ error: "Missing url parameter" }); + if (typeof getSingleQueryParam(url) === "string") { + res.status(400).json({ + error: "The url parameter is deprecated. 
Use path and optional branch instead.", + }); return; } - const urlString = urlParam; + const pathParam = getSingleQueryParam(path); + if (pathParam === undefined || pathParam.trim().length === 0) { + res.status(400).json({ error: "Missing path parameter" }); + return; + } + + const { repoOwner, repoName, repoBranch } = getRepoEnvConfig(); + if (repoOwner.length === 0 || repoName.length === 0) { + res.status(500).json({ + error: "Repository configuration missing", + message: "Missing GITHUB_REPO_OWNER or GITHUB_REPO_NAME environment variable", + }); + return; + } + + const branchToUse = + parseBranchOverride(branch) ?? (repoBranch.length > 0 ? repoBranch : "main"); + const normalizedPath = pathParam.trim().replace(/^\/+/u, ""); + if (normalizedPath.length === 0) { + res.status(400).json({ error: "Missing path parameter" }); + return; + } + + const encodedBranch = encodePathSegments(branchToUse); + const encodedPath = encodePathSegments(normalizedPath); + const rawUrl = `https://raw.githubusercontent.com/${repoOwner}/${repoName}/${encodedBranch}/${encodedPath}`; + try { const headers = { ...getAuthHeaders(), @@ -647,57 +734,79 @@ export default async function handler(req: VercelRequest, res: VercelResponse): }; const response = await handleRequestWithRetry>(() => - axios.get(urlString, { + axios.get(rawUrl, { headers, responseType: "arraybuffer", + timeout: PROXY_REQUEST_TIMEOUT_MS, + maxRedirects: 0, }), ); - const getHeader = (name: string): string | undefined => { - if (typeof response.headers.get === "function") { - const value = response.headers.get(name); - return typeof value === "string" ? value : undefined; - } - const rawValue = (response.headers as Record)[name]; - return typeof rawValue === "string" ? 
rawValue : undefined; - }; - - const upstreamContentType = getHeader("content-type"); - const upstreamContentLength = getHeader("content-length"); - const upstreamDisposition = getHeader("content-disposition"); - const upstreamCacheControl = getHeader("cache-control"); - - res.setHeader("Content-Type", upstreamContentType ?? "application/octet-stream"); - if (upstreamContentLength !== undefined) { - res.setHeader("Content-Length", upstreamContentLength); - } - if (upstreamDisposition !== undefined) { - res.setHeader("Content-Disposition", upstreamDisposition); - } - if (upstreamCacheControl !== undefined) { - res.setHeader("Cache-Control", upstreamCacheControl); - } - + copyFileResponseHeaders(res, response); const buffer = Buffer.from(response.data); res.status(200).send(buffer); return; } catch (error) { const axiosError = error as AxiosErrorResponse; - const decodedUrl = (() => { - try { - return decodeURIComponent(urlString); - } catch { - return urlString; - } - })(); apiLogger.error( "Failed to fetch file content:", - decodedUrl, + `${branchToUse}/${normalizedPath}`, axiosError.message ?? "Unknown error", ); res.status(axiosError.response?.status ?? 500).json({ error: "Failed to fetch file content", - message: axiosError.message ?? 
"Unknown error", + }); + return; + } + } + + // 获取 GitHub 静态资源(禁止带认证头) + if (actionParam === "getGitHubAsset") { + const urlParam = getSingleQueryParam(url); + if (urlParam === undefined || urlParam.trim().length === 0) { + res.status(400).json({ error: "Missing url parameter" }); + return; + } + + let parsedUrl: URL; + try { + parsedUrl = new URL(urlParam); + } catch { + res.status(400).json({ error: "Invalid url parameter" }); + return; + } + + if (parsedUrl.protocol !== "https:") { + res.status(400).json({ error: "Only https protocol is allowed" }); + return; + } + + if (!isAllowedGitHubAssetHost(parsedUrl.hostname)) { + res.status(400).json({ error: "Host is not allowed" }); + return; + } + + try { + const response = await axios.get(parsedUrl.toString(), { + headers: getPublicGitHubHeaders("application/vnd.github.v3.raw"), + responseType: "arraybuffer", + timeout: PROXY_REQUEST_TIMEOUT_MS, + maxRedirects: 0, + }); + + copyFileResponseHeaders(res, response); + res.status(200).send(Buffer.from(response.data)); + return; + } catch (error) { + const axiosError = error as AxiosErrorResponse; + const status = axiosError.response?.status ?? 500; + apiLogger.error( + "Failed to fetch GitHub asset:", + parsedUrl.toString(), + axiosError.message ?? 
"Unknown error", + ); + res.status(status).json({ + error: "Failed to fetch GitHub asset", }); return; } diff --git a/package-lock.json b/package-lock.json index 79e428b..63548e2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "repo-viewer", - "version": "2.0.0", + "version": "2.1.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "repo-viewer", - "version": "2.0.0", + "version": "2.1.0", "dependencies": { "@emotion/react": "^11.14.0", "@emotion/styled": "^11.14.1", diff --git a/package.json b/package.json index 40f29f3..95b8dae 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "repo-viewer", - "version": "2.0.0", + "version": "2.1.0", "private": true, "type": "module", "dependencies": { diff --git a/scripts/generateInitialContent.ts b/scripts/generateInitialContent.ts index 947a332..070ef2c 100644 --- a/scripts/generateInitialContent.ts +++ b/scripts/generateInitialContent.ts @@ -27,8 +27,24 @@ interface InitialContentHydrationPayload { metadata?: Record; } +interface InitialContentManifest { + version: number; + generatedAt: string; + repo: { + owner: string; + name: string; + }; + branches: Record< + string, + { + payloadPath: string; + } + >; +} + const rootDir = resolveRepoRoot(import.meta.url); -const outputPath = path.join(rootDir, "src", "generated", "initialContent.ts"); +const outputDir = path.join(rootDir, "public", "initial-content"); +const manifestOutputPath = path.join(outputDir, "manifest.json"); loadEnvFiles(rootDir); @@ -108,26 +124,50 @@ const readOptionalString = (record: Record, key: string): strin return typeof value === "string" ? 
value : null; }; -const buildOutput = (payload: InitialContentHydrationPayload | null): string => { - const serialize = (value: InitialContentHydrationPayload | null): string => - JSON.stringify(value, null, 2) - .replace(/\u2028/g, "\\u2028") - .replace(/\u2029/g, "\\u2029"); +const serializeJson = ( + value: JsonValue | InitialContentHydrationPayload | InitialContentManifest, +): string => + `${JSON.stringify(value, null, 2) + .replace(/\u2028/g, "\\u2028") + .replace(/\u2029/g, "\\u2029")}\n`; + +const buildManifest = ( + repoOwner: string, + repoName: string, + generatedAt: string, + branchEntries: InitialContentManifest["branches"], +): InitialContentManifest => ({ + version: 1, + generatedAt, + repo: { + owner: repoOwner, + name: repoName, + }, + branches: branchEntries, +}); - const content = payload ? serialize(payload) : "null"; +const writeOutput = async (payload: InitialContentHydrationPayload | null): Promise => { + await fs.promises.rm(outputDir, { recursive: true, force: true }); + await fs.promises.mkdir(outputDir, { recursive: true }); - return [ - 'import type { InitialContentHydrationPayload } from "@/types";', - "", - `export const initialContentPayload: InitialContentHydrationPayload | null = ${content};`, - "", - ].join("\n"); -}; + const generatedAt = new Date().toISOString(); + const manifest = buildManifest(owner, repo, generatedAt, {}); -const writeOutput = async (payload: InitialContentHydrationPayload | null): Promise => { - const output = buildOutput(payload); - await fs.promises.mkdir(path.dirname(outputPath), { recursive: true }); - await fs.promises.writeFile(outputPath, output, "utf8"); + if (payload !== null) { + const payloadFileName = `${encodeURIComponent(payload.branch)}.json`; + const payloadPath = path.join(outputDir, payloadFileName); + const payloadUrlPath = `/initial-content/${payloadFileName}`; + + payload.generatedAt = generatedAt; + manifest.generatedAt = generatedAt; + manifest.branches[payload.branch] = { + 
payloadPath: payloadUrlPath, + }; + + await fs.promises.writeFile(payloadPath, serializeJson(payload), "utf8"); + } + + await fs.promises.writeFile(manifestOutputPath, serializeJson(manifest), "utf8"); }; const run = async (): Promise => { @@ -180,7 +220,7 @@ const run = async (): Promise => { const payload: InitialContentHydrationPayload = { version: 1, - generatedAt: new Date().toISOString(), + generatedAt: "", branch, repo: { owner, diff --git a/src/App.tsx b/src/App.tsx index e66b01e..f3ffdbd 100644 --- a/src/App.tsx +++ b/src/App.tsx @@ -69,7 +69,7 @@ const App = React.memo(() => { (event.target === titleRef.current || titleRef.current.contains(event.target as Node)) ) { void resetApplicationState(); - window.location.href = "/"; + window.dispatchEvent(new Event("navigate-to-home")); } }, [isSmallScreen, resetApplicationState], diff --git a/src/components/layout/ToolbarButtons.tsx b/src/components/layout/ToolbarButtons.tsx index 1ce5849..dfd67c6 100644 --- a/src/components/layout/ToolbarButtons.tsx +++ b/src/components/layout/ToolbarButtons.tsx @@ -14,6 +14,7 @@ import { getGithubConfig } from "@/config"; import { logger } from "@/utils"; import { useContentContext, usePreviewContext } from "@/contexts/unified"; import { useI18n } from "@/contexts/I18nContext"; +import { getPreviewFromUrl } from "@/utils/routing/urlManager"; // 懒加载搜索组件 const SearchDrawer = lazy(async () => import("@/components/interactions/SearchDrawer")); @@ -353,8 +354,6 @@ const ToolbarButtons: React.FC = ({ const onGitHubClick = useCallback(() => { const { repoOwner, repoName } = repoInfo; - const pathname = window.location.pathname.slice(1); - const hash = window.location.hash; const activeBranch = currentBranch !== "" ? 
currentBranch : defaultBranch; const encodedBranch = encodeURIComponent(activeBranch); @@ -367,7 +366,7 @@ const ToolbarButtons: React.FC = ({ } }; - const safePath = pathname + const safePath = currentPath .split("/") .filter((segment) => segment.length > 0) .map(encodeSegment) @@ -375,9 +374,7 @@ const ToolbarButtons: React.FC = ({ let githubUrl = `https://github.com/${repoOwner}/${repoName}`; - const previewRegex = /#preview=([^&]+)/; - const previewMatch = previewRegex.exec(hash); - const previewTarget = previewMatch?.[1]; + const previewTarget = getPreviewFromUrl(); const hasPathname = safePath.length > 0; if (typeof previewTarget === "string" && previewTarget.length > 0 && hasPathname) { @@ -396,7 +393,7 @@ const ToolbarButtons: React.FC = ({ } window.open(githubUrl, "_blank"); - }, [repoInfo, currentBranch, defaultBranch]); + }, [repoInfo, currentBranch, defaultBranch, currentPath]); const openSearchDrawer = useCallback(() => { setSearchDrawerOpen(true); diff --git a/src/components/preview/markdown/utils/imageUtils.ts b/src/components/preview/markdown/utils/imageUtils.ts index 165c502..195e252 100644 --- a/src/components/preview/markdown/utils/imageUtils.ts +++ b/src/components/preview/markdown/utils/imageUtils.ts @@ -60,7 +60,10 @@ export const tryDirectImageLoad = (imgSrc: string): string | null => { try { let directPath: string | null = null; - if (imgSrc.includes("/api/github?action=getFileContent&url=")) { + if ( + imgSrc.includes("/api/github?action=getGitHubAsset&url=") || + imgSrc.includes("/api/github?action=getFileContent&url=") + ) { const encodedUrl = imgSrc.split("url=")[1]; if (encodedUrl !== undefined && encodedUrl.length > 0) { const decodedUrl = decodeURIComponent(encodedUrl); diff --git a/src/components/preview/text/TextPreview.tsx b/src/components/preview/text/TextPreview.tsx index 1dac413..fb0c12b 100644 --- a/src/components/preview/text/TextPreview.tsx +++ b/src/components/preview/text/TextPreview.tsx @@ -5,7 +5,7 @@ * 
虚拟滚动优化、复制到剪贴板等特性。支持大文件的性能优化渲染。 */ -import React, { memo, useCallback, useEffect, useMemo, useState } from "react"; +import React, { memo, useCallback, useEffect, useMemo, useRef, useState } from "react"; import { Box, CircularProgress, @@ -26,7 +26,7 @@ import TextRotationNoneIcon from "@mui/icons-material/TextRotationNone"; import type { TextPreviewProps } from "./types"; import { formatFileSize } from "@/utils/format/formatters"; import { useI18n } from "@/contexts/I18nContext"; -import { highlightLines } from "@/utils/content/prismHighlighter"; +import { encodeLines, normalizeContentLines } from "@/utils/content/textPreviewLines"; import { detectLanguage } from "@/utils/content/languageDetector"; import { useCopyToClipboard } from "@/hooks/useCopyToClipboard"; import { useContainerSize } from "@/components/preview/image/hooks"; @@ -34,6 +34,8 @@ import { useContainerSize } from "@/components/preview/image/hooks"; /** 等宽字体栈 */ const MONO_FONT_STACK = "'JetBrains Mono', 'Fira Code', 'SFMono-Regular', ui-monospace, 'Source Code Pro', Menlo, Monaco, Consolas, 'Liberation Mono', 'Courier New', monospace"; +const MAX_SYNTACTIC_HIGHLIGHT_CHARS = 400_000; +const MAX_SYNTACTIC_HIGHLIGHT_LINES = 15_000; /** * 文本预览内容组件属性接口 @@ -55,18 +57,17 @@ const TextPreviewContent: React.FC = memo( const theme = useTheme(); const { t } = useI18n(); const [wrapText, setWrapText] = useState(false); + const highlightRequestIdRef = useRef(0); const { copied, copy } = useCopyToClipboard(); // 小屏/桌面字号与控件尺寸统一管理,避免分散调整 const contentFontSize = isSmallScreen ? "0.78rem" : "0.9rem"; const lineNumberFontSize = isSmallScreen ? "0.7rem" : "0.9rem"; const controlButtonSize = isSmallScreen ? 26 : 32; const controlIconSize = isSmallScreen ? 
14 : 18; + const charCount = useMemo(() => content.length, [content]); const normalizedLines = useMemo(() => { - if (typeof content !== "string") { - return []; - } - return content.replace(/\r\n/g, "\n").split("\n"); + return normalizeContentLines(content); }, [content]); const lineCount = normalizedLines.length === 0 ? 1 : normalizedLines.length; @@ -79,22 +80,97 @@ const TextPreviewContent: React.FC = memo( }, [previewingName]); const [highlightedLines, setHighlightedLines] = useState([]); + const shouldBypassSyntaxHighlight = useMemo(() => { + return charCount > MAX_SYNTACTIC_HIGHLIGHT_CHARS || lineCount > MAX_SYNTACTIC_HIGHLIGHT_LINES; + }, [charCount, lineCount]); useEffect(() => { let cancelled = false; + let worker: Worker | null = null; + let timer: number | null = null; + let idleHandle: number | null = null; + const requestId = highlightRequestIdRef.current + 1; + highlightRequestIdRef.current = requestId; + + if ( + normalizedLines.length === 0 || + language === null || + language === "" || + shouldBypassSyntaxHighlight + ) { + setHighlightedLines([]); + return () => { + cancelled = true; + }; + } - // 语法高亮计算开销较大,尽量在空闲时间执行以保证首屏响应 - const runHighlight = (): void => { - if (normalizedLines.length === 0) { - if (!cancelled) { - setHighlightedLines([]); - } + const applyResult = (nextLines: string[], nextRequestId: number): void => { + if (cancelled || nextRequestId !== highlightRequestIdRef.current) { return; } - const result = highlightLines(normalizedLines, language); - if (!cancelled) { - setHighlightedLines(result); + setHighlightedLines(nextLines); + }; + + const runFallbackHighlight = async (): Promise => { + const { highlightContent } = await import("@/utils/content/prismHighlighter"); + if (cancelled) { + return; } + applyResult(highlightContent(content, language), requestId); + }; + + if (typeof Worker !== "undefined") { + try { + worker = new Worker( + new URL("../../../utils/content/prismHighlighter.worker.ts", import.meta.url), + { + type: 
"module", + }, + ); + + worker.onmessage = ( + event: MessageEvent<{ id: number; highlightedLines?: string[]; error?: string }>, + ): void => { + const { id, highlightedLines: nextLines } = event.data; + if (worker !== null) { + worker.terminate(); + worker = null; + } + if (Array.isArray(nextLines)) { + applyResult(nextLines, id); + return; + } + applyResult([], id); + }; + + worker.onerror = (): void => { + if (worker !== null) { + worker.terminate(); + worker = null; + } + + void runFallbackHighlight().catch(() => { + applyResult([], requestId); + }); + }; + + worker.postMessage({ id: requestId, content, language }); + + return () => { + cancelled = true; + if (worker !== null) { + worker.terminate(); + } + }; + } catch { + worker = null; + } + } + + const scheduleFallback = (): void => { + void runFallbackHighlight().catch(() => { + applyResult([], requestId); + }); }; if (typeof window !== "undefined") { @@ -104,44 +180,32 @@ const TextPreviewContent: React.FC = memo( }; if (typeof idleCallback.requestIdleCallback === "function") { - const handle = idleCallback.requestIdleCallback( - () => { - runHighlight(); - }, - { timeout: 700 }, - ); + idleHandle = idleCallback.requestIdleCallback(scheduleFallback, { timeout: 700 }); return () => { cancelled = true; - if (typeof idleCallback.cancelIdleCallback === "function") { - idleCallback.cancelIdleCallback(handle); + if (idleHandle !== null && typeof idleCallback.cancelIdleCallback === "function") { + idleCallback.cancelIdleCallback(idleHandle); } }; } } - const timer = window.setTimeout(() => { - runHighlight(); - }, 0); + timer = window.setTimeout(scheduleFallback, 0); return () => { cancelled = true; - window.clearTimeout(timer); + if (timer !== null) { + window.clearTimeout(timer); + } }; - }, [normalizedLines, language]); + }, [content, language, normalizedLines.length, shouldBypassSyntaxHighlight]); // 预先转义文本,避免滚动过程中反复计算 const escapedLines = useMemo(() => { - return normalizedLines.map((line) => { - if 
(line.length === 0) { - return "\u00A0"; - } - return line.replace(/&/g, "&").replace(//g, ">"); - }); + return encodeLines(normalizedLines); }, [normalizedLines]); - const charCount = useMemo(() => (typeof content === "string" ? content.length : 0), [content]); - // 计算实际字节大小(UTF-8 编码) const byteSize = useMemo(() => { if (typeof content !== "string") { diff --git a/src/components/seo/NativeSEO.tsx b/src/components/seo/NativeSEO.tsx index c84ca84..17081f1 100644 --- a/src/components/seo/NativeSEO.tsx +++ b/src/components/seo/NativeSEO.tsx @@ -1,5 +1,6 @@ import React from "react"; import { useMetadata } from "@/contexts/MetadataContext/context"; +import { buildAbsoluteAppUrl } from "@/utils/routing/basePath"; /** * NativeSEO组件属性接口 @@ -46,7 +47,7 @@ const NativeSEO: React.FC = ({ // 确保ogImage是完整URL const fullOgImageUrl = metaOgImage.startsWith("http") ? metaOgImage - : `${window.location.origin}${metaOgImage}`; + : buildAbsoluteAppUrl(metaOgImage); // 获取当前规范URL(canonical) const normalizedCanonical = normalizeString(canonical); diff --git a/src/components/ui/ErrorBoundary.tsx b/src/components/ui/ErrorBoundary.tsx index 09fc039..0cbb142 100644 --- a/src/components/ui/ErrorBoundary.tsx +++ b/src/components/ui/ErrorBoundary.tsx @@ -35,6 +35,7 @@ import { import { ErrorManager } from "@/utils/error"; import { isDeveloperMode } from "@/config"; import { useI18n } from "@/contexts/I18nContext"; +import { buildAppPath } from "@/utils/routing/basePath"; type ErrorInfo = React.ErrorInfo; @@ -159,7 +160,7 @@ class ErrorBoundary extends React.Component { - window.location.href = "/"; + window.location.href = buildAppPath(); }; override render(): React.ReactNode { diff --git a/src/hooks/github/useRepoSearch/useRepoSearch.ts b/src/hooks/github/useRepoSearch/useRepoSearch.ts index ccbf8f7..c53f000 100644 --- a/src/hooks/github/useRepoSearch/useRepoSearch.ts +++ b/src/hooks/github/useRepoSearch/useRepoSearch.ts @@ -550,7 +550,7 @@ export function useRepoSearch({ keyword, 
targetBranches, pathPrefixRaw, - sanitizedExtensions[0], + sanitizedExtensions, ); const allItems: RepoSearchItem[] = branchResults.flatMap( diff --git a/src/hooks/useDownload.ts b/src/hooks/useDownload.ts index 2df5ea4..16889ed 100644 --- a/src/hooks/useDownload.ts +++ b/src/hooks/useDownload.ts @@ -8,12 +8,18 @@ */ import { useReducer, useCallback, useRef } from "react"; -import JSZip from "jszip"; import { saveAs } from "file-saver"; import type { DownloadState, DownloadAction, GitHubContent } from "@/types"; import { GitHub } from "@/services/github"; import { logger } from "@/utils"; +import { isAbortError } from "@/utils/network/abort"; import { requestManager } from "@/utils/request/requestManager"; +import { + downloadFolderAsZip, + prepareZipOutputSink, + type FolderDownloadEntry, + type ZipOutputSink, +} from "@/utils/download/folderZipPipeline"; import { getForceServerProxy } from "@/services/github/config/ProxyForceManager"; import { useI18n } from "@/contexts/I18nContext"; @@ -158,7 +164,10 @@ export const useDownload = ( // 如果是非开发环境或启用了令牌模式,使用服务端API代理 if (getForceServerProxy()) { - downloadUrl = `/api/github?action=getFileContent&url=${encodeURIComponent(item.download_url)}`; + downloadUrl = GitHub.Content.getServerRepoFileProxyUrl( + item.path, + GitHub.Branch.getCurrentBranch(), + ); } const response = await fetch(downloadUrl, { signal }); @@ -213,7 +222,7 @@ export const useDownload = ( const collectFiles = useCallback( async function collectFilesInner( folderPath: string, - fileList: { path: string; url: string }[], + fileList: FolderDownloadEntry[], basePath: string, signal: AbortSignal, ): Promise { @@ -223,6 +232,7 @@ export const useDownload = ( const contents = await requestManager.request( `download-folder-${folderPath}`, (requestSignal) => GitHub.Content.getContents(folderPath, requestSignal), + { signal }, ); // 检查是否已取消 (ref可在异步期间被cancelDownload修改) @@ -246,12 +256,16 @@ export const useDownload = ( // 如果是非开发环境或启用了令牌模式,使用服务端API代理 let 
downloadUrl = item.download_url; if (getForceServerProxy()) { - downloadUrl = `/api/github?action=getFileContent&url=${encodeURIComponent(item.download_url)}`; + downloadUrl = GitHub.Content.getServerRepoFileProxyUrl( + item.path, + GitHub.Branch.getCurrentBranch(), + ); } fileList.push({ path: relativePath, url: downloadUrl, + size: item.size, }); } else { // 递归处理子文件夹 (type === 'dir') @@ -291,16 +305,22 @@ export const useDownload = ( // 创建新的AbortController abortControllerRef.current = new AbortController(); const signal = abortControllerRef.current.signal; + let outputSink: ZipOutputSink | null = null; + let zipPipelineStarted = false; try { - const zip = new JSZip(); + outputSink = await prepareZipOutputSink({ + archiveName: `${folderName}.zip`, + saveAsImpl: saveAs, + }); // 递归获取文件夹内容 - const allFiles: { path: string; url: string }[] = []; + const allFiles: FolderDownloadEntry[] = []; await collectFiles(path, allFiles, path, signal); // 检查是否已取消 (ref可在异步期间被cancelDownload修改) if (hasBeenCancelled()) { + await outputSink.abort(); logger.info("文件夹下载已取消"); return; } @@ -308,78 +328,39 @@ export const useDownload = ( dispatch({ type: "SET_TOTAL_FILES", count: allFiles.length }); logger.info(`需要下载的文件总数: ${String(allFiles.length)}`); - // 下载并添加到zip let processedCount = 0; - for (const file of allFiles) { - try { - // 检查是否已取消 (ref可在异步期间被cancelDownload修改) - if (hasBeenCancelled()) { - logger.info("文件夹下载已取消"); - return; - } - - const response = await fetch(file.url, { signal }); - - if (!response.ok) { - logger.error( - `文件 ${file.path} 下载失败:`, - new Error(`下载失败: ${String(response.status)}`), - ); - continue; - } - - const blob = await response.blob(); - zip.file(file.path, blob); - - processedCount++; - dispatch({ type: "SET_PROCESSING_FILES", count: processedCount }); + zipPipelineStarted = true; + await downloadFolderAsZip({ + files: allFiles, + signal, + archiveName: `${folderName}.zip`, + outputSink, + onFileComplete: (count, total) => { + processedCount = count; + 
dispatch({ type: "SET_PROCESSING_FILES", count }); dispatch({ type: "SET_FOLDER_PROGRESS", - progress: Math.round((processedCount / allFiles.length) * 100), + progress: total > 0 ? Math.round((count / total) * 100) : 100, }); - } catch (e) { - // 检查是否是取消导致的错误 - if (e instanceof Error && (e.name === "AbortError" || hasBeenCancelled())) { - logger.info("文件夹下载已取消"); - return; - } - logger.error(`文件 ${file.path} 下载失败:`, e); - } - - // 检查是否已取消 (ref可在异步期间被cancelDownload修改) - if (hasBeenCancelled()) { - logger.info("文件夹下载已取消"); - return; - } - } - - // 生成zip文件 - const zipBlob = await zip.generateAsync( - { - type: "blob", - compression: "DEFLATE", - compressionOptions: { level: 6 }, }, - (metadata: { percent: number }) => { - // 检查是否已取消 (ref可在异步期间被cancelDownload修改) - if (hasBeenCancelled()) { - return; - } - dispatch({ type: "SET_FOLDER_PROGRESS", progress: Math.round(metadata.percent) }); + onFileError: (file, error) => { + logger.error(`文件 ${file.path} 下载失败:`, error); }, - ); + }); - // 最后一次检查是否已取消 (ref可在异步期间被cancelDownload修改) - if (hasBeenCancelled()) { - logger.info("文件夹下载已取消"); - return; + if (!hasBeenCancelled()) { + dispatch({ type: "SET_PROCESSING_FILES", count: processedCount }); + dispatch({ type: "SET_FOLDER_PROGRESS", progress: 100 }); } - saveAs(zipBlob, `${folderName}.zip`); logger.info(`文件夹下载完成: ${path}`); } catch (e: unknown) { const error = e as Error; - if (error.name === "AbortError" || hasBeenCancelled()) { + if (outputSink !== null && !zipPipelineStarted) { + await outputSink.abort(error); + } + + if (isAbortError(error) || hasBeenCancelled()) { logger.info("文件夹下载已取消"); } else { logger.error("下载文件夹失败:", error); diff --git a/src/hooks/useFilePreview.ts b/src/hooks/useFilePreview.ts index 34482ae..ac4e806 100644 --- a/src/hooks/useFilePreview.ts +++ b/src/hooks/useFilePreview.ts @@ -189,8 +189,9 @@ export const useFilePreview = ( dispatch({ type: "RESET_PREVIEW" }); try { + const currentBranch = GitHub.Branch.getCurrentBranch(); const proxyUrl = 
getForceServerProxy() - ? `/api/github?action=getFileContent&url=${encodeURIComponent(item.download_url)}` + ? GitHub.Content.getServerRepoFileProxyUrl(item.path, currentBranch) : (GitHub.Proxy.transformImageUrl(item.download_url, item.path, useTokenMode) ?? item.download_url); @@ -237,6 +238,9 @@ export const useFilePreview = ( await pdf.openPDFPreview({ fileName: item.name, downloadUrl: item.download_url, + serverProxyUrl: getForceServerProxy() + ? GitHub.Content.getServerRepoFileProxyUrl(item.path, currentBranch) + : undefined, theme: muiTheme, translations: { loading: t("ui.pdf.loading"), diff --git a/src/main.tsx b/src/main.tsx index 3332cce..7be3882 100644 --- a/src/main.tsx +++ b/src/main.tsx @@ -11,8 +11,7 @@ import SEOProvider from "@/contexts/SEOContext"; import { ResponsiveSnackbarProvider } from "@/components/ui/ResponsiveSnackbarProvider"; import { getDeveloperConfig } from "@/config"; import { ErrorManager, setupGlobalErrorHandlers } from "@/utils/error"; -import { initialContentPayload } from "@/generated/initialContent"; -import type { InitialContentHydrationPayload } from "@/types"; +import { loadInitialContentPayload } from "@/services/github/core/content/initialContentLoader"; // 扩展Window接口以支持LaTeX优化清理函数 declare global { @@ -53,8 +52,6 @@ if (!allowConsoleOutput) { // 设置全局错误处理器 setupGlobalErrorHandlers(ErrorManager); -GitHub.Content.hydrate(initialContentPayload as InitialContentHydrationPayload | null | undefined); - // 应用LaTeX渲染优化 // 在应用加载后设置LaTeX优化监听器 document.addEventListener("DOMContentLoaded", (): void => { @@ -70,15 +67,22 @@ if (rootElement === null) { throw new Error('找不到根元素:请确保 HTML 中存在 id="root" 的元素'); } -ReactDOM.createRoot(rootElement).render( - // 开发环境已启用React严格模式以帮助发现潜在的错误,进行刷新后页面抽动属正常现象 - - - - - - - - - , -); +async function bootstrap(): Promise { + const initialContentPayload = await loadInitialContentPayload(); + GitHub.Content.hydrate(initialContentPayload); + + ReactDOM.createRoot(rootElement).render( + // 
开发环境已启用React严格模式以帮助发现潜在的错误,进行刷新后页面抽动属正常现象 + + + + + + + + + , + ); +} + +void bootstrap(); diff --git a/src/services/github.ts b/src/services/github.ts index 2b4fd7e..4237d6c 100644 --- a/src/services/github.ts +++ b/src/services/github.ts @@ -6,13 +6,9 @@ // 导入各个服务模块 import * as ContentServiceModule from "./github/core/content"; -import * as SearchServiceModule from "./github/core/search"; -import * as SearchIndexServiceModule from "./github/core/searchIndex"; -import * as BranchServiceModule from "./github/core/BranchService"; -import * as StatsServiceModule from "./github/core/StatsService"; -import * as PrefetchServiceModule from "./github/core/PrefetchService"; import * as AuthModule from "./github/core/Auth"; import * as ConfigModule from "./github/core/Config"; +import { getSearchIndexConfig } from "@/config"; import { CacheManager as CacheManagerClass } from "./github/cache"; import { GitHubTokenManager } from "./github/TokenManager"; import { @@ -26,6 +22,52 @@ import { } from "./github/proxy"; import { RequestBatcher as RequestBatcherClass } from "./github/RequestBatcher"; +type SearchServiceModule = typeof import("./github/core/search"); +type SearchIndexServiceModule = typeof import("./github/core/searchIndex"); +type BranchServiceModule = typeof import("./github/core/BranchService"); +type StatsServiceModule = typeof import("./github/core/StatsService"); +type PrefetchServiceModule = typeof import("./github/core/PrefetchService"); + +const loadSearchService = (() => { + let modulePromise: Promise | null = null; + return (): Promise => { + modulePromise ??= import("./github/core/search"); + return modulePromise; + }; +})(); + +const loadSearchIndexService = (() => { + let modulePromise: Promise | null = null; + return (): Promise => { + modulePromise ??= import("./github/core/searchIndex"); + return modulePromise; + }; +})(); + +const loadBranchService = (() => { + let modulePromise: Promise | null = null; + return (): Promise => { + modulePromise ??= 
import("./github/core/BranchService"); + return modulePromise; + }; +})(); + +const loadStatsService = (() => { + let modulePromise: Promise | null = null; + return (): Promise => { + modulePromise ??= import("./github/core/StatsService"); + return modulePromise; + }; +})(); + +const loadPrefetchService = (() => { + let modulePromise: Promise | null = null; + return (): Promise => { + modulePromise ??= import("./github/core/PrefetchService"); + return modulePromise; + }; +})(); + /** * 分组导出 * @@ -44,31 +86,59 @@ export const GitHub = { Content: { getContents: ContentServiceModule.getContents, getFileContent: ContentServiceModule.getFileContent, + getServerRepoFileProxyUrl: ContentServiceModule.getServerRepoFileProxyUrl, + getServerResourceProxyUrl: ContentServiceModule.getServerResourceProxyUrl, hydrate: ContentServiceModule.hydrateInitialContent, }, /** 搜索服务 - 搜索文件和内容 */ Search: { - searchWithGitHubApi: SearchServiceModule.searchWithGitHubApi, - searchFiles: SearchServiceModule.searchFiles, - searchMultipleBranchesWithTreesApi: SearchServiceModule.searchMultipleBranchesWithTreesApi, + searchWithGitHubApi: (...args: Parameters) => + loadSearchService().then(({ searchWithGitHubApi }) => searchWithGitHubApi(...args)), + searchFiles: (...args: Parameters) => + loadSearchService().then(({ searchFiles }) => searchFiles(...args)), + searchMultipleBranchesWithTreesApi: ( + ...args: Parameters + ) => + loadSearchService().then(({ searchMultipleBranchesWithTreesApi }) => + searchMultipleBranchesWithTreesApi(...args), + ), }, /** 索引搜索服务 - 使用生成的索引进行检索 */ SearchIndex: { - isEnabled: SearchIndexServiceModule.isSearchIndexEnabled, - getManifest: SearchIndexServiceModule.getSearchIndexManifest, - ensureReady: SearchIndexServiceModule.ensureSearchIndexReady, - getIndexedBranches: SearchIndexServiceModule.getIndexedBranches, - prefetchBranch: SearchIndexServiceModule.prefetchSearchIndexForBranch, - search: SearchIndexServiceModule.searchIndex, - invalidateCache: 
SearchIndexServiceModule.invalidateSearchIndexCache, - refresh: SearchIndexServiceModule.refreshSearchIndex, + isEnabled: (): boolean => getSearchIndexConfig().enabled, + getManifest: (...args: Parameters) => + loadSearchIndexService().then(({ getSearchIndexManifest }) => + getSearchIndexManifest(...args), + ), + ensureReady: (...args: Parameters) => + loadSearchIndexService().then(({ ensureSearchIndexReady }) => + ensureSearchIndexReady(...args), + ), + getIndexedBranches: (...args: Parameters) => + loadSearchIndexService().then(({ getIndexedBranches }) => getIndexedBranches(...args)), + prefetchBranch: ( + ...args: Parameters + ) => + loadSearchIndexService().then(({ prefetchSearchIndexForBranch }) => + prefetchSearchIndexForBranch(...args), + ), + search: (...args: Parameters) => + loadSearchIndexService().then(({ searchIndex }) => searchIndex(...args)), + invalidateCache: (): void => { + void loadSearchIndexService().then(({ invalidateSearchIndexCache }) => { + invalidateSearchIndexCache(); + }); + }, + refresh: (...args: Parameters) => + loadSearchIndexService().then(({ refreshSearchIndex }) => refreshSearchIndex(...args)), }, /** 分支服务 - 管理 Git 分支 */ Branch: { - getBranches: BranchServiceModule.getBranches, + getBranches: (...args: Parameters) => + loadBranchService().then(({ getBranches }) => getBranches(...args)), getCurrentBranch: ConfigModule.getCurrentBranch, setCurrentBranch: ConfigModule.setCurrentBranch, getDefaultBranchName: ConfigModule.getDefaultBranch, @@ -76,17 +146,28 @@ export const GitHub = { /** 缓存服务 - 管理缓存和统计 */ Cache: { - clearCache: StatsServiceModule.clearCache, - getCacheStats: StatsServiceModule.getCacheStats, - getNetworkStats: StatsServiceModule.getNetworkStats, + clearCache: (...args: Parameters) => + loadStatsService().then(({ clearCache }) => clearCache(...args)), + getCacheStats: (): ReturnType => + CacheManagerClass.getCacheStats(), + getNetworkStats: (...args: Parameters) => + loadStatsService().then(({ getNetworkStats }) => 
getNetworkStats(...args)), CacheManager: CacheManagerClass, }, /** 预加载服务 - 预加载相关内容 */ Prefetch: { - prefetchContents: PrefetchServiceModule.prefetchContents, - batchPrefetchContents: PrefetchServiceModule.batchPrefetchContents, - prefetchRelatedContent: PrefetchServiceModule.prefetchRelatedContent, + prefetchContents: (...args: Parameters): void => { + void loadPrefetchService().then(({ prefetchContents }) => { + prefetchContents(...args); + }); + }, + batchPrefetchContents: (...args: Parameters) => + loadPrefetchService().then(({ batchPrefetchContents }) => batchPrefetchContents(...args)), + prefetchRelatedContent: ( + ...args: Parameters + ) => + loadPrefetchService().then(({ prefetchRelatedContent }) => prefetchRelatedContent(...args)), }, /** 认证服务 - Token 和授权管理 */ @@ -118,8 +199,5 @@ export const GitHub = { }, } as const; -// 扁平导出常用函数 -export { searchMultipleBranchesWithTreesApi } from "./github/core/search"; - // 导出类型定义 export type { ConfigInfo } from "./github/core/Config"; diff --git a/src/services/github/RequestBatcher.test.ts b/src/services/github/RequestBatcher.test.ts new file mode 100644 index 0000000..42295ba --- /dev/null +++ b/src/services/github/RequestBatcher.test.ts @@ -0,0 +1,117 @@ +import { beforeEach, describe, expect, it, vi } from "vite-plus/test"; +import { createAbortError } from "@/utils/network/abort"; + +vi.mock("@/utils", () => ({ + logger: { + debug: vi.fn(), + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + }, +})); + +import { RequestBatcher } from "./RequestBatcher"; + +declare global { + interface Window { + setInterval: typeof globalThis.setInterval; + setTimeout: typeof globalThis.setTimeout; + } +} + +beforeEach(() => { + if (typeof window === "undefined") { + vi.stubGlobal("window", globalThis); + } +}); + +describe("RequestBatcher", () => { + it("reuses fingerprint cache for identical completed requests", async () => { + const batcher = new RequestBatcher(); + const executeRequest = vi.fn(async () => ({ value: Date.now() 
})); + + const firstResult = await batcher.enqueue("https://example.com/repos", executeRequest, { + method: "GET", + headers: { Accept: "application/json" }, + }); + + const secondResult = await batcher.enqueue("https://example.com/repos", executeRequest, { + method: "GET", + headers: { Accept: "application/json" }, + }); + + expect(executeRequest).toHaveBeenCalledTimes(1); + expect(secondResult).toBe(firstResult); + }); + + it("bypasses fingerprint cache when requested", async () => { + const batcher = new RequestBatcher(); + const executeRequest = vi + .fn<() => Promise<{ value: number }>>() + .mockResolvedValueOnce({ value: 1 }) + .mockResolvedValueOnce({ value: 2 }); + + const firstResult = await batcher.enqueue("https://example.com/repos", executeRequest, { + method: "GET", + headers: { Accept: "application/json" }, + }); + + await new Promise((resolve) => setTimeout(resolve, 0)); + + const secondResult = await batcher.enqueue("https://example.com/repos", executeRequest, { + method: "GET", + headers: { Accept: "application/json" }, + fingerprintCache: "bypass", + }); + + expect(executeRequest).toHaveBeenCalledTimes(2); + expect(firstResult).toEqual({ value: 1 }); + expect(secondResult).toEqual({ value: 2 }); + }); + + it("still merges in-flight requests when only fingerprint cache is bypassed", async () => { + const batcher = new RequestBatcher(); + let releaseRequest: (() => void) | null = null; + const executeRequest = vi.fn( + () => + new Promise<{ value: number }>((resolve) => { + releaseRequest = () => resolve({ value: 1 }); + }), + ); + + const firstPromise = batcher.enqueue("https://example.com/repos", executeRequest, { + method: "GET", + headers: { Accept: "application/json" }, + fingerprintCache: "bypass", + }); + + const secondPromise = batcher.enqueue("https://example.com/repos", executeRequest, { + method: "GET", + headers: { Accept: "application/json" }, + fingerprintCache: "bypass", + }); + + expect(executeRequest).toHaveBeenCalledTimes(1); + 
releaseRequest?.(); + + const [firstResult, secondResult] = await Promise.all([firstPromise, secondPromise]); + expect(firstResult).toEqual({ value: 1 }); + expect(secondResult).toEqual({ value: 1 }); + }); + + it("does not retry aborted requests", async () => { + const batcher = new RequestBatcher(); + const executeRequest = vi.fn(async () => { + throw createAbortError("Request aborted"); + }); + + await expect( + batcher.enqueue("https://example.com/repos", executeRequest, { + method: "GET", + headers: { Accept: "application/json" }, + }), + ).rejects.toMatchObject({ name: "AbortError" }); + + expect(executeRequest).toHaveBeenCalledTimes(1); + }); +}); diff --git a/src/services/github/RequestBatcher.ts b/src/services/github/RequestBatcher.ts index e4730c7..c5975b1 100644 --- a/src/services/github/RequestBatcher.ts +++ b/src/services/github/RequestBatcher.ts @@ -1,4 +1,5 @@ import { logger } from "@/utils"; +import { isAbortError } from "@/utils/network/abort"; import { createTimeWheel } from "@/utils/data-structures/TimeWheel"; import type { TimeWheel } from "@/utils/data-structures/TimeWheel"; @@ -32,6 +33,9 @@ interface RetryOptions { silent?: boolean; } +type FingerprintCachePolicy = "use" | "bypass"; +type InFlightDeduplicationPolicy = "merge" | "isolate"; + /** * 请求批处理器类 * @@ -127,7 +131,8 @@ export class RequestBatcher { * @param options.priority - 请求优先级,默认为'medium' * @param options.method - HTTP方法,默认为'GET' * @param options.headers - 请求头 - * @param options.skipDeduplication - 是否跳过去重检查 + * @param options.fingerprintCache - 是否复用已完成请求的指纹缓存 + * @param options.inFlightDeduplication - 是否合并相同的进行中请求 * @returns Promise,解析为请求结果 */ public enqueue( @@ -137,26 +142,32 @@ export class RequestBatcher { priority?: "high" | "medium" | "low"; method?: string; headers?: Record; - skipDeduplication?: boolean; + fingerprintCache?: FingerprintCachePolicy; + inFlightDeduplication?: InFlightDeduplicationPolicy; } = {}, ): Promise { const { priority = "medium", method = "GET", 
headers = {}, - skipDeduplication = false, + fingerprintCache = "use", + inFlightDeduplication = "merge", } = options; + const requestKey = + inFlightDeduplication === "merge" + ? key + : `${key}::${method}:${Date.now().toString()}:${Math.random().toString(36).slice(2)}`; // 检查是否有重复请求正在进行 - if (this.pendingRequests.has(key)) { + if (inFlightDeduplication === "merge" && this.pendingRequests.has(requestKey)) { logger.debug(`请求合并: ${key}`); - return this.pendingRequests.get(key) as Promise; + return this.pendingRequests.get(requestKey) as Promise; } - // 生成请求指纹并检查缓存,避免不必要的指纹生成 + // 生成请求指纹并检查结果缓存,避免不必要的网络请求 const fingerprint = this.generateFingerprint(key, method, headers); - if (!skipDeduplication) { + if (fingerprintCache === "use") { const cachedData = this.fingerprintWheel.get(fingerprint); if (cachedData !== undefined) { // 增加命中次数 @@ -168,8 +179,8 @@ export class RequestBatcher { return new Promise((resolve, reject) => { // 如果还没有这个键的请求队列,创建它 - if (!this.batchedRequests.has(key)) { - this.batchedRequests.set(key, []); + if (!this.batchedRequests.has(requestKey)) { + this.batchedRequests.set(requestKey, []); // 设置超时以批量处理请求 this.batchTimeout ??= window.setTimeout(() => { @@ -178,7 +189,7 @@ export class RequestBatcher { } // 添加到队列 - const queue = this.batchedRequests.get(key); + const queue = this.batchedRequests.get(requestKey); if (queue === undefined) { reject(new Error(`队列不存在: ${key}`)); return; @@ -195,39 +206,55 @@ export class RequestBatcher { // 如果是队列中的第一个请求,执行它 if (isFirstRequest) { - void this.executeWithRetry(key, executeRequest, fingerprint, skipDeduplication); + void this.executeWithRetry( + requestKey, + key, + executeRequest, + fingerprint, + fingerprintCache, + ).catch(() => { + // 请求结果会通过队列中的 promise 向调用方传递,这里只避免未处理 rejection。 + }); } }); } // 带重试机制的请求执行 private async executeWithRetry( + requestKey: string, key: string, executeRequest: () => Promise, fingerprint: string, - skipDeduplication: boolean, - ): Promise { - const requestPromise = 
this.performRequest(key, executeRequest, fingerprint, skipDeduplication); + fingerprintCache: FingerprintCachePolicy, + ): Promise { + const requestPromise = this.performRequest( + requestKey, + key, + executeRequest, + fingerprint, + fingerprintCache, + ); // 添加到进行中的请求 - this.pendingRequests.set(key, requestPromise); + this.pendingRequests.set(requestKey, requestPromise); try { - await requestPromise; + return await requestPromise; } finally { // 清理进行中的请求记录 - this.pendingRequests.delete(key); + this.pendingRequests.delete(requestKey); } } // 执行请求 private async performRequest( + requestKey: string, key: string, executeRequest: () => Promise, fingerprint: string, - skipDeduplication: boolean, - ): Promise { - const queue = this.batchedRequests.get(key) ?? []; + fingerprintCache: FingerprintCachePolicy, + ): Promise { + const queue = this.batchedRequests.get(requestKey) ?? []; // 按优先级排序 queue.sort((a, b) => { @@ -239,6 +266,7 @@ export class RequestBatcher { const retryOptions: RetryOptions = { maxRetries: this.maxRetries, backoff: (attempt) => Math.min(1000 * Math.pow(2, attempt), 5000), // 指数退避,最大5秒 + shouldRetry: (error) => !isAbortError(error), onRetry: (attempt, error) => { const errorMessage = error instanceof Error ? 
error.message : String(error); logger.warn( @@ -253,7 +281,7 @@ export class RequestBatcher { const result = await this.withRetry(executeRequest, retryOptions); // 缓存成功响应的结果 - if (!skipDeduplication) { + if (fingerprintCache === "use") { this.fingerprintWheel.add( fingerprint, { @@ -268,7 +296,8 @@ export class RequestBatcher { queue.forEach((request) => { request.resolve(result); }); - this.batchedRequests.delete(key); + this.batchedRequests.delete(requestKey); + return result; } catch (lastError: unknown) { // 所有重试都失败了 logger.error(`请求最终失败: ${key}`, lastError); @@ -276,7 +305,8 @@ export class RequestBatcher { request.retryCount++; request.reject(lastError); }); - this.batchedRequests.delete(key); + this.batchedRequests.delete(requestKey); + throw lastError; } } diff --git a/src/services/github/core/PrefetchService.ts b/src/services/github/core/PrefetchService.ts index 3ef6d03..1115e60 100644 --- a/src/services/github/core/PrefetchService.ts +++ b/src/services/github/core/PrefetchService.ts @@ -10,6 +10,7 @@ import type { GitHubContent } from "@/types"; import { logger } from "@/utils"; import { CacheManager } from "../cache"; +import { getContents } from "./content"; import { prefetchFilesWithPriority, selectPriorityDirectories, @@ -29,16 +30,9 @@ export function prefetchContents(path: string, priority: "high" | "medium" | "lo // 使用低优先级预加载,不影响用户操作 const delay = priority === "high" ? 0 : priority === "medium" ? 
100 : 200; setTimeout(() => { - // 动态导入避免循环依赖 - void import("./content") - .then(({ getContents }) => { - void getContents(path).catch(() => { - // 忽略错误 - }); - }) - .catch(() => { - // 忽略动态导入错误 - }); + void getContents(path).catch(() => { + // 忽略错误 + }); }, delay); } @@ -56,9 +50,6 @@ export async function batchPrefetchContents(paths: string[], maxConcurrency = 3) return; } - // 动态导入避免循环依赖 - const { getContents } = await import("./content"); - // 限制并发数量防止网络资源过耗 for (let i = 0; i < paths.length; i += maxConcurrency) { const batch = paths.slice(i, i + maxConcurrency); diff --git a/src/services/github/core/StatsService.ts b/src/services/github/core/StatsService.ts index 8ac337e..b795b7e 100644 --- a/src/services/github/core/StatsService.ts +++ b/src/services/github/core/StatsService.ts @@ -9,6 +9,8 @@ import { CacheManager } from "../cache"; import { getProxyHealthStats, resetFailedProxyServices } from "../proxy"; +import { clearBatcherCache, getBatcher } from "./content"; +import { clearBranchTreeCache } from "./search/trees"; /** * 清除所有缓存和重置网络状态 @@ -19,8 +21,8 @@ import { getProxyHealthStats, resetFailedProxyServices } from "../proxy"; */ export async function clearCache(): Promise { await CacheManager.clearAllCaches(); - const { clearBatcherCache } = await import("./content"); clearBatcherCache(); + clearBranchTreeCache(); resetFailedProxyServices(); } @@ -45,7 +47,6 @@ export async function getNetworkStats(): Promise<{ proxy: ReturnType; cache: ReturnType; }> { - const { getBatcher } = await import("./content"); return { batcher: getBatcher().getStats(), proxy: getProxyHealthStats(), diff --git a/src/services/github/core/content/index.ts b/src/services/github/core/content/index.ts index ddd75d1..b6af2f0 100644 --- a/src/services/github/core/content/index.ts +++ b/src/services/github/core/content/index.ts @@ -7,6 +7,8 @@ export { getContents, getFileContent, + getServerRepoFileProxyUrl, + getServerResourceProxyUrl, getBatcher, clearBatcherCache, 
hydrateInitialContent, diff --git a/src/services/github/core/content/initialContentLoader.test.ts b/src/services/github/core/content/initialContentLoader.test.ts new file mode 100644 index 0000000..6280095 --- /dev/null +++ b/src/services/github/core/content/initialContentLoader.test.ts @@ -0,0 +1,143 @@ +import { beforeEach, describe, expect, it, vi } from "vite-plus/test"; + +vi.mock("@/config", () => ({ + getGithubConfig: () => ({ + repoOwner: "octo", + repoName: "repo-viewer", + repoBranch: "main", + }), +})); + +vi.mock("@/utils", () => ({ + logger: { + debug: vi.fn(), + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + }, +})); + +import { __initialContentLoaderTestUtils, loadInitialContentPayload } from "./initialContentLoader"; + +describe("initialContentLoader", () => { + beforeEach(() => { + vi.unstubAllGlobals(); + }); + + it("validates manifest shape", () => { + expect( + __initialContentLoaderTestUtils.isInitialContentManifest({ + version: 1, + generatedAt: "2026-03-15T00:00:00.000Z", + repo: { + owner: "octo", + name: "repo-viewer", + }, + branches: { + main: { + payloadPath: "/initial-content/main.json", + }, + }, + }), + ).toBe(true); + + expect( + __initialContentLoaderTestUtils.isInitialContentManifest({ + version: 1, + generatedAt: "2026-03-15T00:00:00.000Z", + repo: { + owner: "octo", + }, + branches: {}, + }), + ).toBe(false); + }); + + it("returns null when manifest is missing", async () => { + const fetchMock = vi.fn(async () => new Response(null, { status: 404 })); + vi.stubGlobal("fetch", fetchMock); + + await expect(loadInitialContentPayload()).resolves.toBeNull(); + expect(fetchMock).toHaveBeenCalledTimes(1); + expect(fetchMock).toHaveBeenCalledWith("/initial-content/manifest.json", { + method: "GET", + signal: null, + }); + }); + + it("loads payload for the configured branch", async () => { + const fetchMock = vi.fn(async (input: RequestInfo | URL) => { + const url = + input instanceof URL ? 
input.toString() : typeof input === "string" ? input : input.url; + + if (url === "/initial-content/manifest.json") { + return new Response( + JSON.stringify({ + version: 1, + generatedAt: "2026-03-15T00:00:00.000Z", + repo: { + owner: "octo", + name: "repo-viewer", + }, + branches: { + main: { + payloadPath: "/initial-content/main.json", + }, + }, + }), + { status: 200 }, + ); + } + + if (url === "/initial-content/main.json") { + return new Response( + JSON.stringify({ + version: 1, + generatedAt: "2026-03-15T00:00:00.000Z", + branch: "main", + repo: { + owner: "octo", + name: "repo-viewer", + }, + directories: [ + { + path: "", + contents: [], + }, + ], + files: [], + metadata: { + allowReadmeHydration: true, + }, + }), + { status: 200 }, + ); + } + + return new Response(null, { status: 404 }); + }); + + vi.stubGlobal("fetch", fetchMock); + + await expect(loadInitialContentPayload()).resolves.toEqual({ + version: 1, + generatedAt: "2026-03-15T00:00:00.000Z", + branch: "main", + repo: { + owner: "octo", + name: "repo-viewer", + }, + directories: [ + { + path: "", + contents: [], + }, + ], + files: [], + metadata: { + allowReadmeHydration: true, + }, + }); + expect(fetchMock).toHaveBeenCalledTimes(2); + }); +}); diff --git a/src/services/github/core/content/initialContentLoader.ts b/src/services/github/core/content/initialContentLoader.ts new file mode 100644 index 0000000..56fd1f1 --- /dev/null +++ b/src/services/github/core/content/initialContentLoader.ts @@ -0,0 +1,173 @@ +import { getGithubConfig } from "@/config"; +import type { InitialContentHydrationPayload } from "@/types"; +import { logger } from "@/utils"; +import { buildAbsoluteAppUrl } from "@/utils/routing/basePath"; + +interface InitialContentManifestBranchEntry { + payloadPath: string; +} + +interface InitialContentManifest { + version: number; + generatedAt: string; + repo: { + owner: string; + name: string; + }; + branches: Record; +} + +const INITIAL_CONTENT_MANIFEST_PATH = 
"/initial-content/manifest.json"; + +const isRecord = (value: unknown): value is Record => + typeof value === "object" && value !== null; + +const isInitialContentManifest = (value: unknown): value is InitialContentManifest => { + if (!isRecord(value) || !isRecord(value.repo) || !isRecord(value.branches)) { + return false; + } + + if ( + typeof value.version !== "number" || + typeof value.generatedAt !== "string" || + typeof value.repo.owner !== "string" || + typeof value.repo.name !== "string" + ) { + return false; + } + + return Object.values(value.branches).every( + (entry) => isRecord(entry) && typeof entry.payloadPath === "string", + ); +}; + +const isInitialContentPayload = (value: unknown): value is InitialContentHydrationPayload => { + if (!isRecord(value) || !isRecord(value.repo)) { + return false; + } + + if ( + typeof value.version !== "number" || + typeof value.generatedAt !== "string" || + typeof value.branch !== "string" || + typeof value.repo.owner !== "string" || + typeof value.repo.name !== "string" || + !Array.isArray(value.directories) || + !Array.isArray(value.files) + ) { + return false; + } + + return value.directories.every( + (directory) => + isRecord(directory) && + typeof directory.path === "string" && + Array.isArray(directory.contents), + ); +}; + +const resolveInitialContentUrl = (path: string): string => buildAbsoluteAppUrl(path); + +export async function fetchInitialContentManifest( + signal?: AbortSignal, +): Promise { + const manifestUrl = resolveInitialContentUrl(INITIAL_CONTENT_MANIFEST_PATH); + + try { + const response = await fetch(manifestUrl, { + method: "GET", + signal: signal ?? 
null, + }); + + if (response.status === 404) { + logger.debug("[InitialContent] Manifest not found, skipping preload."); + return null; + } + + if (!response.ok) { + logger.warn(`[InitialContent] Failed to fetch manifest: ${response.status}`); + return null; + } + + const data: unknown = await response.json(); + if (!isInitialContentManifest(data)) { + logger.warn("[InitialContent] Manifest validation failed."); + return null; + } + + return data; + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + logger.warn(`[InitialContent] Manifest fetch failed: ${message}`); + return null; + } +} + +export async function loadInitialContentPayload( + signal?: AbortSignal, +): Promise { + const githubConfig = getGithubConfig(); + const manifest = await fetchInitialContentManifest(signal); + + if (manifest === null) { + return null; + } + + if ( + manifest.repo.owner !== githubConfig.repoOwner || + manifest.repo.name !== githubConfig.repoName + ) { + logger.warn("[InitialContent] Manifest repo mismatch, skipping preload."); + return null; + } + + const entry = manifest.branches[githubConfig.repoBranch]; + if (entry === undefined) { + logger.debug(`[InitialContent] No preload entry for branch ${githubConfig.repoBranch}.`); + return null; + } + + try { + const response = await fetch(resolveInitialContentUrl(entry.payloadPath), { + method: "GET", + signal: signal ?? 
null, + }); + + if (response.status === 404) { + logger.debug(`[InitialContent] Payload not found for branch ${githubConfig.repoBranch}.`); + return null; + } + + if (!response.ok) { + logger.warn(`[InitialContent] Failed to fetch payload: ${response.status}`); + return null; + } + + const data: unknown = await response.json(); + if (!isInitialContentPayload(data)) { + logger.warn("[InitialContent] Payload validation failed."); + return null; + } + + if ( + data.branch !== githubConfig.repoBranch || + data.repo.owner !== githubConfig.repoOwner || + data.repo.name !== githubConfig.repoName + ) { + logger.warn("[InitialContent] Payload repo or branch mismatch, skipping preload."); + return null; + } + + return data; + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + logger.warn(`[InitialContent] Payload fetch failed: ${message}`); + return null; + } +} + +export const __initialContentLoaderTestUtils = { + isInitialContentManifest, + isInitialContentPayload, + resolveInitialContentUrl, +}; diff --git a/src/services/github/core/content/serverApiUrls.ts b/src/services/github/core/content/serverApiUrls.ts new file mode 100644 index 0000000..210cc74 --- /dev/null +++ b/src/services/github/core/content/serverApiUrls.ts @@ -0,0 +1,115 @@ +import { GITHUB_REPO_NAME, GITHUB_REPO_OWNER, getCurrentBranch } from "../Config"; + +interface ParsedConfiguredRepoRawUrl { + branch: string; + path: string; +} + +const RAW_GITHUB_HOST = "raw.githubusercontent.com"; + +const decodePathSegment = (segment: string): string => { + try { + return decodeURIComponent(segment); + } catch { + return segment; + } +}; + +const sanitizeFilePath = (filePath: string): string => filePath.trim().replace(/^\/+/, ""); + +const splitBranchSegments = (branch: string): string[] => { + const normalized = branch.trim(); + if (normalized === "") { + return []; + } + return normalized.split("/").filter((segment) => segment.length > 0); +}; + +export function 
buildRepoFileContentApiUrl(filePath: string, branch?: string): string { + const params = new URLSearchParams(); + params.set("action", "getFileContent"); + params.set("path", sanitizeFilePath(filePath)); + + const branchToUse = (branch ?? getCurrentBranch()).trim(); + if (branchToUse.length > 0) { + params.set("branch", branchToUse); + } + + return `/api/github?${params.toString()}`; +} + +export function buildGitHubAssetApiUrl(url: string): string { + const params = new URLSearchParams(); + params.set("action", "getGitHubAsset"); + params.set("url", url); + return `/api/github?${params.toString()}`; +} + +export function parseConfiguredRepoRawUrl( + rawUrl: string, + preferredBranch = getCurrentBranch(), +): ParsedConfiguredRepoRawUrl | null { + try { + const parsed = new URL(rawUrl); + if (parsed.hostname !== RAW_GITHUB_HOST) { + return null; + } + + const segments = parsed.pathname + .split("/") + .filter((segment) => segment.length > 0) + .map((segment) => decodePathSegment(segment)); + + if (segments.length < 4) { + return null; + } + + if (segments[0] !== GITHUB_REPO_OWNER || segments[1] !== GITHUB_REPO_NAME) { + return null; + } + + const branchAndPath = segments.slice(2); + const preferredBranchSegments = splitBranchSegments(preferredBranch); + + if ( + preferredBranchSegments.length > 0 && + branchAndPath.length > preferredBranchSegments.length + ) { + const isPreferredBranch = preferredBranchSegments.every( + (segment, index) => branchAndPath[index] === segment, + ); + + if (isPreferredBranch) { + const path = branchAndPath.slice(preferredBranchSegments.length).join("/"); + if (path.length > 0) { + return { + branch: preferredBranch, + path, + }; + } + } + } + + const fallbackBranch = branchAndPath[0] ?? 
""; + const fallbackPath = branchAndPath.slice(1).join("/"); + if (fallbackBranch === "" || fallbackPath === "") { + return null; + } + + return { + branch: fallbackBranch, + path: fallbackPath, + }; + } catch { + return null; + } +} + +export function buildServerApiUrlForGitHubResource(url: string, preferredBranch?: string): string { + const parsedRepoFile = parseConfiguredRepoRawUrl(url, preferredBranch ?? getCurrentBranch()); + if (parsedRepoFile !== null) { + return buildRepoFileContentApiUrl(parsedRepoFile.path, parsedRepoFile.branch); + } + + return buildGitHubAssetApiUrl(url); +} diff --git a/src/services/github/core/content/service.test.ts b/src/services/github/core/content/service.test.ts new file mode 100644 index 0000000..036ee59 --- /dev/null +++ b/src/services/github/core/content/service.test.ts @@ -0,0 +1,149 @@ +import { beforeEach, describe, expect, it, vi } from "vite-plus/test"; + +import { createAbortError } from "@/utils/network/abort"; + +const { axiosGetMock } = vi.hoisted(() => ({ + axiosGetMock: vi.fn(), +})); + +vi.mock("axios", () => ({ + default: { + get: axiosGetMock, + }, +})); + +vi.mock("@/utils", () => ({ + logger: { + debug: vi.fn(), + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + }, +})); + +vi.mock("../../config", () => ({ + getForceServerProxy: vi.fn(() => false), + shouldUseServerAPI: vi.fn(() => false), +})); + +vi.mock("../Auth", () => ({ + getAuthHeaders: vi.fn(() => ({ Authorization: "Bearer test-token" })), +})); + +vi.mock("../Config", () => ({ + USE_TOKEN_MODE: false, + getApiUrl: vi.fn((path: string, branch: string) => `https://api.example.com/${branch}/${path}`), + getCurrentBranch: vi.fn(() => "main"), +})); + +vi.mock("../../schemas", () => ({ + safeValidateGitHubContentsResponse: vi.fn((data: unknown) => ({ success: true, data })), + filterAndNormalizeGitHubContents: vi.fn((data: unknown) => data), + transformGitHubContentsResponse: vi.fn((data: unknown) => data), + validateGitHubContentsArray: vi.fn(() => ({ 
isValid: true, invalidItems: [] })), +})); + +vi.mock("./cacheState", () => ({ + ensureCacheInitialized: vi.fn(async () => {}), + getCachedDirectoryContents: vi.fn(async () => null), + getCachedFileContent: vi.fn(async () => null), + isCacheAvailable: vi.fn(() => false), + storeDirectoryContents: vi.fn(async () => {}), + storeFileContent: vi.fn(async () => {}), +})); + +vi.mock("./cacheKeys", () => ({ + buildContentsCacheKey: vi.fn((path: string, branch: string) => `${branch}:${path}`), +})); + +vi.mock("./hydrationStore", () => ({ + consumeHydratedDirectory: vi.fn(async () => null), + consumeHydratedFile: vi.fn(async () => null), + hydrateInitialContent: vi.fn(), + INITIAL_CONTENT_EXCLUDE_FILES: [], +})); + +if (typeof window === "undefined") { + vi.stubGlobal("window", globalThis); +} + +import { shouldUseServerAPI } from "../../config"; +const { clearBatcherCache, getContents } = await import("./service"); + +describe("content service abort handling", () => { + beforeEach(() => { + vi.clearAllMocks(); + vi.unstubAllGlobals(); + vi.stubGlobal("window", globalThis); + clearBatcherCache(); + vi.mocked(shouldUseServerAPI).mockReturnValue(false); + }); + + it("propagates abort to direct fetch requests", async () => { + let fetchSignal: AbortSignal | undefined; + let resolveFetchStarted: (() => void) | null = null; + const fetchStarted = new Promise((resolve) => { + resolveFetchStarted = resolve; + }); + const fetchMock = vi.fn((_: RequestInfo | URL, init?: RequestInit) => { + fetchSignal = init?.signal; + resolveFetchStarted?.(); + + return new Promise((_, reject) => { + fetchSignal?.addEventListener("abort", () => reject(createAbortError("Request aborted")), { + once: true, + }); + }); + }); + const controller = new AbortController(); + vi.stubGlobal("fetch", fetchMock); + + const promise = getContents("docs", controller.signal); + await fetchStarted; + controller.abort(); + + await expect(promise).rejects.toMatchObject({ name: "AbortError" }); + 
expect(fetchMock).toHaveBeenCalledTimes(1); + expect(fetchSignal?.aborted).toBe(true); + }); + + it("propagates abort to server proxy axios requests", async () => { + vi.mocked(shouldUseServerAPI).mockReturnValue(true); + const controller = new AbortController(); + let resolveAxiosStarted: (() => void) | null = null; + const axiosStarted = new Promise((resolve) => { + resolveAxiosStarted = resolve; + }); + + axiosGetMock.mockImplementationOnce(async (_url, config) => { + const signal = config?.signal as AbortSignal | undefined; + resolveAxiosStarted?.(); + + return new Promise((_, reject) => { + signal?.addEventListener( + "abort", + () => + reject( + Object.assign(new Error("canceled"), { + name: "CanceledError", + code: "ERR_CANCELED", + }), + ), + { once: true }, + ); + }); + }); + + const promise = getContents("docs", controller.signal); + await axiosStarted; + controller.abort(); + + await expect(promise).rejects.toMatchObject({ name: "AbortError" }); + expect(axiosGetMock).toHaveBeenCalledWith( + "/api/github?action=getContents&path=docs&branch=main", + { + signal: controller.signal, + }, + ); + }); +}); diff --git a/src/services/github/core/content/service.ts b/src/services/github/core/content/service.ts index 7c282af..3762f11 100644 --- a/src/services/github/core/content/service.ts +++ b/src/services/github/core/content/service.ts @@ -2,6 +2,7 @@ import axios from "axios"; import type { GitHubContent, InitialContentHydrationPayload } from "@/types"; import { logger } from "@/utils"; +import { createAbortError, isAbortError } from "@/utils/network/abort"; import { RequestBatcher } from "../../RequestBatcher"; import { getForceServerProxy, shouldUseServerAPI } from "../../config"; @@ -28,6 +29,7 @@ import { hydrateInitialContent as hydratePayload, INITIAL_CONTENT_EXCLUDE_FILES, } from "./hydrationStore"; +import { buildRepoFileContentApiUrl, buildServerApiUrlForGitHubResource } from "./serverApiUrls"; /** * 内容服务入口 @@ -96,7 +98,7 @@ export async function 
getContents( query.set("action", "getContents"); query.set("path", path); query.set("branch", branch); - const { data } = await axios.get(`/api/github?${query.toString()}`); + const { data } = await axios.get(`/api/github?${query.toString()}`, { signal }); rawData = data; logger.debug(`通过服务端API获取内容: ${path}`); } else { @@ -128,6 +130,7 @@ export async function getContents( priority: "high", method: "GET", headers: getAuthHeaders() as Record, + fingerprintCache: forceRefresh ? "bypass" : "use", }, ); @@ -158,6 +161,10 @@ export async function getContents( return contents; } catch (unknownError) { const cause = unknownError instanceof Error ? unknownError : new Error(String(unknownError)); + if (isAbortError(cause)) { + throw createAbortError("Request aborted"); + } + logger.error(`获取内容失败: ${path}`, cause); throw new Error(`获取内容失败: ${cause.message}`); } @@ -192,7 +199,7 @@ export async function getFileContent(fileUrl: string): Promise { try { const response = await (async () => { if (getForceServerProxy()) { - const serverApiUrl = `/api/github?action=getFileContent&url=${encodeURIComponent(fileUrl)}`; + const serverApiUrl = buildServerApiUrlForGitHubResource(fileUrl, branch); return fetch(serverApiUrl); } @@ -219,11 +226,37 @@ export async function getFileContent(fileUrl: string): Promise { return content; } catch (unknownError) { const cause = unknownError instanceof Error ? 
unknownError : new Error(String(unknownError)); + if (isAbortError(cause)) { + throw createAbortError("Request aborted"); + } + logger.error(`获取文件内容失败: ${fileUrl}`, cause); throw new Error(`获取文件内容失败: ${cause.message}`); } } +/** + * 构建仓库文件的服务端代理地址。 + * + * @param filePath - 仓库内文件路径 + * @param branch - 可选分支,未传时使用当前分支 + * @returns 服务端代理 URL + */ +export function getServerRepoFileProxyUrl(filePath: string, branch?: string): string { + return buildRepoFileContentApiUrl(filePath, branch); +} + +/** + * 根据资源 URL 构建服务端代理地址。 + * + * @param fileUrl - 原始资源地址 + * @param branch - 可选分支,用于解析当前仓库 raw 链接 + * @returns 服务端代理 URL + */ +export function getServerResourceProxyUrl(fileUrl: string, branch?: string): string { + return buildServerApiUrlForGitHubResource(fileUrl, branch); +} + /** * 获取批处理器实例。 * diff --git a/src/services/github/core/prefetch/executor.ts b/src/services/github/core/prefetch/executor.ts index ed307b0..b410efc 100644 --- a/src/services/github/core/prefetch/executor.ts +++ b/src/services/github/core/prefetch/executor.ts @@ -2,6 +2,7 @@ import type { GitHubContent } from "@/types"; import { RequestBatcher } from "../../RequestBatcher"; import { getAuthHeaders } from "../Auth"; +import { getFileContent } from "../content"; const batcher = new RequestBatcher(); @@ -24,8 +25,6 @@ export async function prefetchFilesWithPriority( return; } - const { getFileContent } = await import("../content"); - const prefetchPromises = fileUrls.map((url) => batcher .enqueue( @@ -38,7 +37,7 @@ export async function prefetchFilesWithPriority( priority, method: "GET", headers: getAuthHeaders() as Record, - skipDeduplication: false, + fingerprintCache: "use", }, ) .catch(() => null), diff --git a/src/services/github/core/search/local.test.ts b/src/services/github/core/search/local.test.ts new file mode 100644 index 0000000..edef710 --- /dev/null +++ b/src/services/github/core/search/local.test.ts @@ -0,0 +1,57 @@ +import { beforeEach, describe, expect, it, vi } from "vite-plus/test"; + 
+vi.mock("@/utils", () => ({ + logger: { + debug: vi.fn(), + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + }, +})); + +vi.mock("./trees", () => ({ + getBranchTree: vi.fn(), +})); + +vi.mock("../content", () => ({ + getContents: vi.fn(), +})); + +import { searchMultipleBranchesWithTreesApi } from "./local"; +import { getBranchTree } from "./trees"; + +const mockedGetBranchTree = vi.mocked(getBranchTree); + +describe("searchMultipleBranchesWithTreesApi", () => { + beforeEach(() => { + mockedGetBranchTree.mockReset(); + }); + + it("applies all extension filters in Trees API fallback", async () => { + mockedGetBranchTree.mockResolvedValue([ + { path: "docs/readme.md", type: "blob", sha: "1", url: "https://example.com/1" }, + { path: "src/readme.ts", type: "blob", sha: "2", url: "https://example.com/2" }, + { path: "src/readme.js", type: "blob", sha: "3", url: "https://example.com/3" }, + { path: "docs/guide.md", type: "blob", sha: "4", url: "https://example.com/4" }, + ]); + + const results = await searchMultipleBranchesWithTreesApi("readme", ["main"], "", ["md", ".TS"]); + + expect(results).toHaveLength(1); + expect(results[0]?.results.map((item) => item.path)).toEqual([ + "docs/readme.md", + "src/readme.ts", + ]); + }); + + it("keeps single-extension filtering behavior compatible", async () => { + mockedGetBranchTree.mockResolvedValue([ + { path: "src/component.tsx", type: "blob", sha: "1", url: "https://example.com/1" }, + { path: "src/component.ts", type: "blob", sha: "2", url: "https://example.com/2" }, + ]); + + const results = await searchMultipleBranchesWithTreesApi("component", ["main"], "", "tsx"); + + expect(results[0]?.results.map((item) => item.path)).toEqual(["src/component.tsx"]); + }); +}); diff --git a/src/services/github/core/search/local.ts b/src/services/github/core/search/local.ts index be50f92..947e5ea 100644 --- a/src/services/github/core/search/local.ts +++ b/src/services/github/core/search/local.ts @@ -11,8 +11,27 @@ import type { 
GitHubContent } from "@/types"; import { logger } from "@/utils"; import { GITHUB_REPO_NAME, GITHUB_REPO_OWNER } from "../Config"; +import { getContents } from "../content"; import type { GitTreeItem } from "./trees"; +type FileTypeFilter = string | string[] | undefined; + +function normalizeFileTypeFilters(fileTypeFilter: FileTypeFilter): Set { + const values = Array.isArray(fileTypeFilter) ? fileTypeFilter : [fileTypeFilter]; + const normalized = new Set(); + + for (const value of values) { + const trimmed = value?.trim().toLowerCase(); + if (trimmed === undefined || trimmed === "") { + continue; + } + + normalized.add(trimmed.startsWith(".") ? trimmed.slice(1) : trimmed); + } + + return normalized; +} + /** * 加载目录内容 * @@ -20,7 +39,6 @@ import type { GitTreeItem } from "./trees"; * @returns Promise,解析为目录内容数组 */ async function loadDirectoryContents(path: string): Promise { - const { getContents } = await import("../content"); return getContents(path); } @@ -31,13 +49,15 @@ async function loadDirectoryContents(path: string): Promise { * @param fileTypeFilter - 文件类型过滤器(扩展名) * @returns 如果匹配或无需过滤返回 true */ -function matchesFileType(file: GitHubContent, fileTypeFilter?: string): boolean { - if (fileTypeFilter === undefined || fileTypeFilter === "" || file.type !== "file") { +function matchesFileType(file: GitHubContent, fileTypeFilter?: FileTypeFilter): boolean { + const filters = normalizeFileTypeFilters(fileTypeFilter); + + if (filters.size === 0 || file.type !== "file") { return true; } const extension = file.name.split(".").pop()?.toLowerCase(); - return extension === fileTypeFilter.toLowerCase(); + return extension !== undefined && filters.has(extension); } /** @@ -51,7 +71,7 @@ function matchesFileType(file: GitHubContent, fileTypeFilter?: string): boolean function filterFilesByName( contents: GitHubContent[], searchTerm: string, - fileTypeFilter?: string, + fileTypeFilter?: FileTypeFilter, ): GitHubContent[] { const normalizedSearchTerm = 
searchTerm.trim().toLowerCase(); @@ -76,7 +96,7 @@ function filterFilesByName( async function searchSubdirectories( directories: GitHubContent[], searchTerm: string, - fileTypeFilter?: string, + fileTypeFilter?: FileTypeFilter, ): Promise { if (directories.length === 0) { return []; @@ -112,7 +132,7 @@ export async function searchFiles( searchTerm: string, currentPath = "", recursive = false, - fileTypeFilter?: string, + fileTypeFilter?: FileTypeFilter, ): Promise { if (searchTerm.trim() === "") { return []; @@ -154,7 +174,7 @@ export async function searchMultipleBranchesWithTreesApi( searchTerm: string, branches: string[], pathPrefix = "", - fileTypeFilter?: string, + fileTypeFilter?: FileTypeFilter, ): Promise<{ branch: string; results: GitHubContent[] }[]> { const searchPromises = branches.map(async (branch) => ({ branch, @@ -180,7 +200,7 @@ async function searchBranchWithTreesApi( searchTerm: string, branch: string, pathPrefix = "", - fileTypeFilter?: string, + fileTypeFilter?: FileTypeFilter, ): Promise { try { const { getBranchTree } = await import("./trees"); @@ -192,6 +212,7 @@ async function searchBranchWithTreesApi( const normalizedSearchTerm = searchTerm.trim().toLowerCase(); const normalizedPrefix = pathPrefix.trim().toLowerCase(); + const normalizedFileTypeFilters = normalizeFileTypeFilters(fileTypeFilter); return tree .filter((item: GitTreeItem) => item.type === "blob") @@ -209,11 +230,11 @@ async function searchBranchWithTreesApi( return false; } - if (fileTypeFilter !== undefined && fileTypeFilter !== "") { + if (normalizedFileTypeFilters.size > 0) { const ext = fileName.includes(".") ? 
fileName.slice(fileName.lastIndexOf(".") + 1).toLowerCase() : ""; - if (ext !== fileTypeFilter.toLowerCase()) { + if (!normalizedFileTypeFilters.has(ext)) { return false; } } diff --git a/src/services/github/core/search/trees.test.ts b/src/services/github/core/search/trees.test.ts new file mode 100644 index 0000000..f7000ba --- /dev/null +++ b/src/services/github/core/search/trees.test.ts @@ -0,0 +1,108 @@ +import { beforeEach, describe, expect, it, vi } from "vite-plus/test"; + +const { axiosGet } = vi.hoisted(() => ({ + axiosGet: vi.fn(), +})); + +vi.mock("axios", () => ({ + default: { + get: axiosGet, + }, +})); + +vi.mock("../../config", () => ({ + shouldUseServerAPI: vi.fn(() => true), +})); + +vi.mock("../Auth", () => ({ + getAuthHeaders: vi.fn(() => ({})), +})); + +import { clearBranchTreeCache, getBranchTree } from "./trees"; + +describe("getBranchTree", () => { + beforeEach(() => { + clearBranchTreeCache(); + axiosGet.mockReset(); + }); + + it("reuses the cached tree when the branch head SHA is unchanged", async () => { + axiosGet.mockImplementation((url: string) => { + if (url.includes("action=getGitRef")) { + return Promise.resolve({ + data: { + object: { + sha: "commit-1", + }, + }, + }); + } + + if (url.includes("action=getTree")) { + return Promise.resolve({ + data: { + tree: [{ path: "src/main.ts", type: "blob", sha: "blob-1" }], + }, + }); + } + + throw new Error(`Unexpected request: ${url}`); + }); + + const first = await getBranchTree("main"); + const second = await getBranchTree("main"); + + expect(second).toBe(first); + expect( + axiosGet.mock.calls.filter(([url]) => String(url).includes("action=getTree")), + ).toHaveLength(1); + expect( + axiosGet.mock.calls.filter(([url]) => String(url).includes("action=getGitRef")), + ).toHaveLength(2); + }); + + it("invalidates the cached tree when the branch head SHA changes", async () => { + let currentSha = "commit-1"; + let treeVersion = 0; + + axiosGet.mockImplementation((url: string) => { + if 
(url.includes("action=getGitRef")) { + return Promise.resolve({ + data: { + object: { + sha: currentSha, + }, + }, + }); + } + + if (url.includes("action=getTree")) { + treeVersion += 1; + + return Promise.resolve({ + data: { + tree: [ + { + path: `src/file-${treeVersion.toString()}.ts`, + type: "blob", + sha: `blob-${treeVersion.toString()}`, + }, + ], + }, + }); + } + + throw new Error(`Unexpected request: ${url}`); + }); + + const first = await getBranchTree("main"); + currentSha = "commit-2"; + const second = await getBranchTree("main"); + + expect(second).not.toBe(first); + expect(second).toEqual([{ path: "src/file-2.ts", type: "blob", sha: "blob-2" }]); + expect( + axiosGet.mock.calls.filter(([url]) => String(url).includes("action=getTree")), + ).toHaveLength(2); + }); +}); diff --git a/src/services/github/core/search/trees.ts b/src/services/github/core/search/trees.ts index 83d7711..94032fc 100644 --- a/src/services/github/core/search/trees.ts +++ b/src/services/github/core/search/trees.ts @@ -8,6 +8,7 @@ */ import axios from "axios"; +import { SmartCache } from "@/utils/cache/SmartCache"; import { GITHUB_API_BASE, GITHUB_REPO_NAME, GITHUB_REPO_OWNER } from "../Config"; import { shouldUseServerAPI } from "../../config"; @@ -31,6 +32,76 @@ export interface GitTreeItem { sha?: string; } +interface GitRefResponse { + object?: { + sha?: string; + }; +} + +interface CachedBranchTree { + tree: GitTreeItem[] | null; +} + +const TREE_CACHE_TTL = 5 * 60 * 1000; +const TREE_CACHE_MAX_SIZE = 24; + +const branchTreeCache = new SmartCache({ + maxSize: TREE_CACHE_MAX_SIZE, + ttl: TREE_CACHE_TTL, + cleanupThreshold: 0.75, + cleanupRatio: 0.25, +}); + +const inFlightTreeRequests = new Map>(); + +function encodePathSegments(value: string): string { + return value + .split("/") + .map((segment) => encodeURIComponent(segment)) + .join("/"); +} + +function normalizeSha(value: string | undefined): string | null { + const normalized = value?.trim(); + return normalized !== 
undefined && normalized !== "" ? normalized : null; +} + +async function fetchBranchHeadShaViaServerApi(branch: string): Promise { + const query = new URLSearchParams({ + action: "getGitRef", + ref: `heads/${branch}`, + }); + + const response = await axios.get(`/api/github?${query.toString()}`); + const data = response.data as GitRefResponse; + return normalizeSha(data.object?.sha); +} + +async function fetchBranchHeadShaDirectly(branch: string): Promise { + const encodedRef = encodePathSegments(`heads/${branch}`); + const apiUrl = `${GITHUB_API_BASE}/repos/${GITHUB_REPO_OWNER}/${GITHUB_REPO_NAME}/git/ref/${encodedRef}`; + + const response = await fetch(apiUrl, { + method: "GET", + headers: getAuthHeaders(), + }); + + if (!response.ok) { + throw new Error(`HTTP ${response.status.toString()}: ${response.statusText}`); + } + + const data = (await response.json()) as GitRefResponse; + return normalizeSha(data.object?.sha); +} + +async function resolveBranchHeadSha(branch: string): Promise { + if (shouldUseServerAPI()) { + return fetchBranchHeadShaViaServerApi(branch); + } + + return fetchBranchHeadShaDirectly(branch); +} + /** * 通过服务端 API 获取分支树 * @@ -71,6 +142,19 @@ async function fetchTreeDirectly(branch: string): Promise return Array.isArray(data.tree) ? 
data.tree : null; } +function getTreeCacheKey(branch: string, branchHeadSha: string | null): string { + if (branchHeadSha !== null) { + return `sha:${branchHeadSha}`; + } + + return `branch:${branch}`; +} + +export function clearBranchTreeCache(): void { + branchTreeCache.clear(); + inFlightTreeRequests.clear(); +} + /** * 获取分支的完整文件树 * @@ -81,9 +165,42 @@ async function fetchTreeDirectly(branch: string): Promise * @returns Promise,解析为树节点数组,失败时返回 null */ export async function getBranchTree(branch: string): Promise { - if (shouldUseServerAPI()) { - return fetchTreeViaServerApi(branch); + const normalizedBranch = branch.trim(); + if (normalizedBranch === "") { + return null; + } + + let branchHeadSha: string | null = null; + try { + branchHeadSha = await resolveBranchHeadSha(normalizedBranch); + } catch { + // Ref 查询失败时回退到分支名级别缓存,避免影响搜索可用性。 + } + + const cacheKey = getTreeCacheKey(normalizedBranch, branchHeadSha); + const cached = branchTreeCache.get(cacheKey); + if (cached !== null) { + return cached.tree; } - return fetchTreeDirectly(branch); + const inFlightRequest = inFlightTreeRequests.get(cacheKey); + if (inFlightRequest !== undefined) { + return inFlightRequest; + } + + const request = ( + shouldUseServerAPI() + ? 
fetchTreeViaServerApi(normalizedBranch) + : fetchTreeDirectly(normalizedBranch) + ) + .then((tree) => { + branchTreeCache.set(cacheKey, { tree }); + return tree; + }) + .finally(() => { + inFlightTreeRequests.delete(cacheKey); + }); + + inFlightTreeRequests.set(cacheKey, request); + return request; } diff --git a/src/services/github/core/searchIndex/fetchers.ts b/src/services/github/core/searchIndex/fetchers.ts index 95406c3..f480ed7 100644 --- a/src/services/github/core/searchIndex/fetchers.ts +++ b/src/services/github/core/searchIndex/fetchers.ts @@ -9,6 +9,7 @@ import { getSearchIndexConfig } from "@/config"; import { logger } from "@/utils"; +import { buildAbsoluteAppUrl } from "@/utils/routing/basePath"; import { safeValidateSearchIndexManifest, @@ -30,13 +31,15 @@ interface DocfindModule { init?: (input?: RequestInfo | URL | Response) => Promise; } -const resolveUrl = (path: string): string => { +const resolveApiUrl = (path: string): string => { if (typeof window === "undefined") { return path; } return new URL(path, window.location.origin).toString(); }; +const resolveStaticAssetUrl = (path: string): string => buildAbsoluteAppUrl(path); + const ACTION_INDEX_BRANCH = "RV-Index"; const ACTION_INDEX_ROOT = "public"; @@ -62,7 +65,7 @@ const buildActionAssetUrl = ( if (typeof hash === "string" && hash.length > 0) { params.set("v", hash); } - return resolveUrl(`/api/github?${params.toString()}`); + return resolveApiUrl(`/api/github?${params.toString()}`); }; const resolveDocfindPath = (entry: SearchIndexBranchEntry): string => { @@ -90,7 +93,7 @@ const buildDocfindUrls = ( return { moduleUrl, wasmUrl }; } - const baseUrl = resolveUrl(resolveDocfindPath(entry)); + const baseUrl = resolveStaticAssetUrl(resolveDocfindPath(entry)); const moduleUrl = entry.hash.length > 0 ? 
`${baseUrl}?v=${encodeURIComponent(entry.hash)}` : baseUrl; const wasmBase = new URL("docfind_bg.wasm", baseUrl); @@ -116,7 +119,7 @@ export async function fetchManifest(signal?: AbortSignal): Promise { + it("normalizes CRLF content into preview lines", () => { + expect(normalizeContentLines("a\r\nb\r\n")).toEqual(["a", "b", ""]); + }); + + it("escapes plain text lines and preserves empty rows", () => { + expect(encodeLines(["", ""])).toEqual(["\u00A0", "<tag>"]); + }); + + it("highlights supported languages line by line", () => { + const highlighted = highlightContent("const value = 1;\nreturn value;", "typescript"); + + expect(highlighted).toHaveLength(2); + expect(highlighted[0]).toContain('class="token keyword"'); + expect(highlighted[0]).toContain("const"); + }); + + it("falls back to escaped text when no language is available", () => { + expect(highlightContent("
", null)).toEqual(["<div>"]); + }); +}); diff --git a/src/utils/content/prismHighlightCore.ts b/src/utils/content/prismHighlightCore.ts new file mode 100644 index 0000000..ee21062 --- /dev/null +++ b/src/utils/content/prismHighlightCore.ts @@ -0,0 +1,167 @@ +import * as Prism from "prismjs"; +import { encodeLines, escapeHtml, normalizeContentLines } from "./textPreviewLines"; +import "prismjs/components/prism-javascript"; +import "prismjs/components/prism-typescript"; +import "prismjs/components/prism-jsx"; +import "prismjs/components/prism-tsx"; +import "prismjs/components/prism-markup"; +import "prismjs/components/prism-css"; +import "prismjs/components/prism-scss"; +import "prismjs/components/prism-sass"; +import "prismjs/components/prism-json"; +import "prismjs/components/prism-yaml"; +import "prismjs/components/prism-python"; +import "prismjs/components/prism-java"; +import "prismjs/components/prism-kotlin"; +import "prismjs/components/prism-scala"; +import "prismjs/components/prism-csharp"; +import "prismjs/components/prism-clike"; +import "prismjs/components/prism-c"; +import "prismjs/components/prism-cpp"; +import "prismjs/components/prism-objectivec"; +import "prismjs/components/prism-swift"; +import "prismjs/components/prism-rust"; +import "prismjs/components/prism-go"; +import "prismjs/components/prism-ruby"; +import "prismjs/components/prism-php"; +import "prismjs/components/prism-lua"; +import "prismjs/components/prism-perl"; +import "prismjs/components/prism-bash"; +import "prismjs/components/prism-powershell"; +import "prismjs/components/prism-sql"; +import "prismjs/components/prism-ini"; +import "prismjs/components/prism-properties"; +import "prismjs/components/prism-toml"; +import "prismjs/components/prism-clojure"; +import "prismjs/components/prism-elixir"; +import "prismjs/components/prism-erlang"; +import "prismjs/components/prism-haskell"; +import "prismjs/components/prism-markdown"; +import "prismjs/components/prism-latex"; +import 
"prismjs/components/prism-docker"; +import "prismjs/components/prism-gradle"; +import "prismjs/components/prism-cmake"; +import "prismjs/components/prism-makefile"; +import "prismjs/components/prism-nix"; +import "prismjs/components/prism-dart"; +import "prismjs/components/prism-git"; +import "prismjs/components/prism-batch"; + +type TokenValue = string | TokenValue[] | { type: string; content: TokenValue } | null | undefined; + +function encodeHtml(value: string): string { + const encoded = Prism.util.encode(value); + return typeof encoded === "string" ? encoded : escapeHtml(value); +} + +function tokensToHtml(tokens: TokenValue): string { + if (typeof tokens === "string") { + return encodeHtml(tokens); + } + + if (Array.isArray(tokens)) { + return tokens.map((token) => tokensToHtml(token)).join(""); + } + + if (tokens !== null && tokens !== undefined && typeof tokens === "object") { + if ("type" in tokens && typeof tokens.type === "string" && "content" in tokens) { + const content = tokensToHtml(tokens.content); + return `${content}`; + } + } + + return ""; +} + +/** + * 高亮文本文件的每一行 + * + * 为了保持 HTML 标签的完整性并确保每行都能正确高亮, + * 我们先将整个代码块高亮,然后使用标记来分割行。 + * + * @param html - 高亮后的 HTML 字符串 + * @param lineCount - 需要切分的行数 + * @returns 每行高亮后的 HTML 字符串数组 + */ +function splitHighlightedHtml(html: string, lineCount: number): string[] { + const result: string[] = []; + let currentLine = ""; + let inTag = false; + let tagBuffer = ""; + const openTags: string[] = []; + + for (const char of html) { + if (char === "<") { + inTag = true; + tagBuffer = "<"; + } else if (char === ">") { + tagBuffer += ">"; + inTag = false; + currentLine += tagBuffer; + + if (tagBuffer.startsWith("")) { + openTags.push(tagBuffer); + } + + tagBuffer = ""; + } else if (inTag) { + tagBuffer = tagBuffer + char; + } else if (char === "\n") { + result.push(currentLine !== "" ? 
currentLine : "\u00A0"); + + currentLine = ""; + for (const tag of openTags) { + currentLine = currentLine + tag; + } + } else { + currentLine = currentLine + char; + } + } + + if (currentLine !== "" || result.length === 0) { + result.push(currentLine !== "" ? currentLine : "\u00A0"); + } + + while (result.length < lineCount) { + let paddingLine = ""; + if (openTags.length > 0) { + for (const tag of openTags) { + paddingLine = paddingLine + tag; + } + } + result.push(paddingLine !== "" ? paddingLine : "\u00A0"); + } + + return result.slice(0, lineCount); +} + +export function highlightLines(lines: string[], language: string | null): string[] { + if (language === null || language === "") { + return encodeLines(lines); + } + + const grammar = Prism.languages[language]; + if (grammar === undefined) { + return encodeLines(lines); + } + + const fullCode = lines.join("\n"); + + try { + const tokens = Prism.tokenize(fullCode, grammar); + return splitHighlightedHtml(tokensToHtml(tokens), lines.length); + } catch { + try { + const highlighted = Prism.highlight(fullCode, grammar, language); + return splitHighlightedHtml(highlighted, lines.length); + } catch { + return encodeLines(lines); + } + } +} + +export function highlightContent(content: string, language: string | null): string[] { + return highlightLines(normalizeContentLines(content), language); +} diff --git a/src/utils/content/prismHighlighter.ts b/src/utils/content/prismHighlighter.ts index c8eee78..5f7ab90 100644 --- a/src/utils/content/prismHighlighter.ts +++ b/src/utils/content/prismHighlighter.ts @@ -1,210 +1,9 @@ import * as Prism from "prismjs"; import { detectLanguage } from "./languageDetector"; +import { highlightContent, highlightLines } from "./prismHighlightCore"; import { logger } from "@/utils"; -import "prismjs/components/prism-javascript"; -import "prismjs/components/prism-typescript"; -import "prismjs/components/prism-jsx"; -import "prismjs/components/prism-tsx"; -import 
"prismjs/components/prism-markup"; -import "prismjs/components/prism-css"; -import "prismjs/components/prism-scss"; -import "prismjs/components/prism-sass"; -import "prismjs/components/prism-json"; -import "prismjs/components/prism-yaml"; -import "prismjs/components/prism-python"; -import "prismjs/components/prism-java"; -import "prismjs/components/prism-kotlin"; -import "prismjs/components/prism-scala"; -import "prismjs/components/prism-csharp"; -import "prismjs/components/prism-clike"; -import "prismjs/components/prism-c"; -import "prismjs/components/prism-cpp"; -import "prismjs/components/prism-objectivec"; -import "prismjs/components/prism-swift"; -import "prismjs/components/prism-rust"; -import "prismjs/components/prism-go"; -import "prismjs/components/prism-ruby"; -import "prismjs/components/prism-php"; -import "prismjs/components/prism-lua"; -import "prismjs/components/prism-perl"; -import "prismjs/components/prism-bash"; -import "prismjs/components/prism-powershell"; -import "prismjs/components/prism-sql"; -import "prismjs/components/prism-ini"; -import "prismjs/components/prism-properties"; -import "prismjs/components/prism-toml"; -import "prismjs/components/prism-clojure"; -import "prismjs/components/prism-elixir"; -import "prismjs/components/prism-erlang"; -import "prismjs/components/prism-haskell"; -import "prismjs/components/prism-markdown"; -import "prismjs/components/prism-latex"; -import "prismjs/components/prism-docker"; -import "prismjs/components/prism-gradle"; -import "prismjs/components/prism-cmake"; -import "prismjs/components/prism-makefile"; -import "prismjs/components/prism-nix"; -import "prismjs/components/prism-dart"; -import "prismjs/components/prism-git"; -import "prismjs/components/prism-batch"; - -/** - * 高亮文本文件的每一行 - * - * 为了保持 HTML 标签的完整性并确保每行都能正确高亮, - * 我们先将整个代码块高亮,然后使用标记来分割行。 - * - * @param html - 高亮后的 HTML 字符串 - * @param lineCount - 需要切分的行数 - * @returns 每行高亮后的 HTML 字符串数组 - */ -function splitHighlightedHtml(html: string, 
lineCount: number): string[] { - const result: string[] = []; - let currentLine = ""; - let inTag = false; - let tagBuffer = ""; - const openTags: string[] = []; - - for (const char of html) { - if (char === "<") { - inTag = true; - tagBuffer = "<"; - } else if (char === ">") { - tagBuffer += ">"; - inTag = false; - currentLine += tagBuffer; - - // 处理标签栈 - if (tagBuffer.startsWith("")) { - // 开始标签(非自闭合):保存完整标签以便在换行时重新打开 - openTags.push(tagBuffer); - } - - tagBuffer = ""; - } else if (inTag) { - tagBuffer = tagBuffer + char; - } else if (char === "\n") { - // 换行:保存当前行 - // 需要在当前行末尾关闭未完成的标签,并在下一行重新打开 - result.push(currentLine !== "" ? currentLine : "\u00A0"); - - // 准备下一行:重新打开所有未关闭的标签 - currentLine = ""; - // 将未关闭的标签重新添加到下一行开头,保持语法高亮上下文 - for (const tag of openTags) { - currentLine = currentLine + tag; - } - } else { - currentLine = currentLine + char; - } - } - - // 添加最后一行(可能包含未关闭的标签) - if (currentLine !== "" || result.length === 0) { - result.push(currentLine !== "" ? currentLine : "\u00A0"); - } - - // 确保行数匹配 - while (result.length < lineCount) { - // 如果还有未关闭的标签,需要在新增的行中继续打开 - let paddingLine = ""; - if (openTags.length > 0) { - for (const tag of openTags) { - paddingLine = paddingLine + tag; - } - } - result.push(paddingLine !== "" ? paddingLine : "\u00A0"); - } - - return result.slice(0, lineCount); -} - -export function highlightLines(lines: string[], language: string | null): string[] { - if (language === null || language === "") { - // 如果没有检测到语言,只转义每行 - return lines.map((line) => { - const encoded = Prism.util.encode(line); - return typeof encoded === "string" - ? encoded - : line.replace(/&/g, "&").replace(//g, ">"); - }); - } - - // 检查语言是否已加载 - const grammar = Prism.languages[language]; - if (grammar === undefined) { - // 语言未加载,只转义每行 - return lines.map((line) => { - const encoded = Prism.util.encode(line); - return typeof encoded === "string" - ? 
encoded - : line.replace(/&/g, "&").replace(//g, ">"); - }); - } - - // 将整个代码块一起高亮,然后智能分割 - // 这样可以保持上下文(如多行注释、字符串等) - const fullCode = lines.join("\n"); - - // 使用 tokenize 方法,这是更底层且更安全的 API - try { - // 先使用 tokenize 分词 - const tokens = Prism.tokenize(fullCode, grammar); - - // 将 tokens 递归转换为 HTML 字符串 - type TokenValue = - | string - | TokenValue[] - | { type: string; content: TokenValue } - | null - | undefined; - - function tokensToHtml(tokens: TokenValue): string { - if (typeof tokens === "string") { - const encoded = Prism.util.encode(tokens); - return typeof encoded === "string" - ? encoded - : tokens.replace(/&/g, "&").replace(//g, ">"); - } - if (Array.isArray(tokens)) { - return tokens.map((token) => tokensToHtml(token)).join(""); - } - if (tokens !== null && tokens !== undefined && typeof tokens === "object") { - // 检查是否是 Prism.Token 实例 - if ("type" in tokens && typeof tokens.type === "string" && "content" in tokens) { - const content = tokensToHtml(tokens.content); - return `${content}`; - } - } - return ""; - } - - const highlightedString = tokensToHtml(tokens); - return splitHighlightedHtml(highlightedString, lines.length); - } catch (error) { - logger.warn("[Prism] Tokenize failed, trying highlight API:", error); - - try { - const highlighted = Prism.highlight(fullCode, grammar, language); - return splitHighlightedHtml(highlighted, lines.length); - } catch (highlightError) { - // 完全失败,只转义 HTML,不进行语法高亮 - logger.warn("[Prism] Highlight also failed, using plain text:", highlightError); - return lines.map((line) => { - if (line.length === 0) { - return "\u00A0"; - } - const encoded = Prism.util.encode(line); - return typeof encoded === "string" - ? 
encoded - : line.replace(/&/g, "&").replace(//g, ">"); - }); - } - } -} +export { highlightContent, highlightLines } from "./prismHighlightCore"; +export { encodeLines, normalizeContentLines } from "./textPreviewLines"; /** * 根据文件名获取高亮后的代码 @@ -248,6 +47,5 @@ export function highlightCodeByFilename(code: string, filename: string | undefin } } - const lines = code.replace(/\r\n/g, "\n").split("\n"); - return highlightLines(lines, language); + return highlightContent(code, language); } diff --git a/src/utils/content/prismHighlighter.worker.ts b/src/utils/content/prismHighlighter.worker.ts new file mode 100644 index 0000000..034e938 --- /dev/null +++ b/src/utils/content/prismHighlighter.worker.ts @@ -0,0 +1,35 @@ +import { highlightContent } from "./prismHighlightCore"; + +interface PrismHighlightRequest { + id: number; + content: string; + language: string | null; +} + +interface PrismHighlightSuccessResponse { + id: number; + highlightedLines: string[]; +} + +interface PrismHighlightErrorResponse { + id: number; + error: string; +} + +const workerScope = self as DedicatedWorkerGlobalScope; + +workerScope.onmessage = (event: MessageEvent): void => { + const { id, content, language } = event.data; + + try { + const highlightedLines = highlightContent(content, language); + const response: PrismHighlightSuccessResponse = { id, highlightedLines }; + workerScope.postMessage(response); + } catch (error) { + const message = error instanceof Error ? 
error.message : "Unknown Prism worker error"; + const response: PrismHighlightErrorResponse = { id, error: message }; + workerScope.postMessage(response); + } +}; + +export {}; diff --git a/src/utils/content/textPreviewLines.ts b/src/utils/content/textPreviewLines.ts new file mode 100644 index 0000000..ad99c88 --- /dev/null +++ b/src/utils/content/textPreviewLines.ts @@ -0,0 +1,11 @@ +export function normalizeContentLines(content: string): string[] { + return content.replace(/\r\n/g, "\n").split("\n"); +} + +export function escapeHtml(value: string): string { + return value.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;"); +} + +export function encodeLines(lines: string[]): string[] { + return lines.map((line) => (line.length === 0 ? "\u00A0" : escapeHtml(line))); +} diff --git a/src/utils/download/folderZipPipeline.test.ts b/src/utils/download/folderZipPipeline.test.ts new file mode 100644 index 0000000..456df9f --- /dev/null +++ b/src/utils/download/folderZipPipeline.test.ts @@ -0,0 +1,245 @@ +import { beforeEach, describe, expect, it, vi } from "vite-plus/test"; +import { unzipSync } from "fflate"; + +import { createAbortError } from "@/utils/network/abort"; + +import { downloadFolderAsZip, prepareZipOutputSink } from "./folderZipPipeline"; + +const encoder = new TextEncoder(); +const decoder = new TextDecoder(); + +const createStreamResponse = ( + chunks: string[], + options: { + blobSpy?: ReturnType<typeof vi.fn>; + } = {}, +): Response => { + const stream = new ReadableStream({ + start(controller) { + for (const chunk of chunks) { + controller.enqueue(encoder.encode(chunk)); + } + controller.close(); + }, + }); + + return { + ok: true, + status: 200, + statusText: "OK", + body: stream, + blob: options.blobSpy ?? 
vi.fn(), + } as unknown as Response; +}; + +const concatChunks = (chunks: Uint8Array[]): Uint8Array => { + const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0); + const output = new Uint8Array(totalLength); + let offset = 0; + + for (const chunk of chunks) { + output.set(chunk, offset); + offset += chunk.length; + } + + return output; +}; + +describe("downloadFolderAsZip", () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + it("falls back to Blob download and streams file chunks into the zip", async () => { + const saveAsImpl = vi.fn(); + const blobSpy = vi.fn(); + const fetchImpl = vi.fn(async () => createStreamResponse(["hello ", "world"], { blobSpy })); + + await downloadFolderAsZip({ + files: [{ path: "docs/readme.txt", url: "https://example.com/readme.txt" }], + signal: new AbortController().signal, + archiveName: "docs.zip", + fetchImpl, + saveAsImpl, + showSaveFilePickerImpl: null, + }); + + expect(fetchImpl).toHaveBeenCalledTimes(1); + expect(blobSpy).not.toHaveBeenCalled(); + expect(saveAsImpl).toHaveBeenCalledTimes(1); + + const [savedBlob, savedName] = saveAsImpl.mock.calls[0] as [Blob, string]; + expect(savedName).toBe("docs.zip"); + + const unzipped = unzipSync(new Uint8Array(await savedBlob.arrayBuffer())); + expect(decoder.decode(unzipped["docs/readme.txt"])).toBe("hello world"); + }); + + it("can prepare the native output sink before downloads start", async () => { + const writtenChunks: Uint8Array[] = []; + const saveAsImpl = vi.fn(); + const write = vi.fn(async (chunk: Uint8Array) => { + writtenChunks.push(chunk); + }); + const close = vi.fn(async () => {}); + const abort = vi.fn(async () => {}); + const writable = { + write, + close, + abort, + } as unknown as FileSystemWritableFileStream; + const createWritable = vi.fn(async () => writable); + const showSaveFilePickerImpl = vi.fn(async () => ({ + createWritable, + })) as unknown as (options?: unknown) => Promise; + const fetchImpl = vi.fn(async () => 
createStreamResponse(["native sink"])); + const outputSink = await prepareZipOutputSink({ + archiveName: "native.zip", + saveAsImpl, + showSaveFilePickerImpl, + }); + + expect(showSaveFilePickerImpl).toHaveBeenCalledTimes(1); + expect(createWritable).toHaveBeenCalledTimes(1); + + await downloadFolderAsZip({ + files: [{ path: "docs/native.txt", url: "https://example.com/native.txt" }], + signal: new AbortController().signal, + archiveName: "native.zip", + outputSink, + fetchImpl, + saveAsImpl, + showSaveFilePickerImpl, + }); + + expect(showSaveFilePickerImpl).toHaveBeenCalledTimes(1); + expect(saveAsImpl).not.toHaveBeenCalled(); + expect(write).toHaveBeenCalled(); + expect(close).toHaveBeenCalledTimes(1); + + const unzipped = unzipSync(concatChunks(writtenChunks)); + expect(decoder.decode(unzipped["docs/native.txt"])).toBe("native sink"); + }); + + it("continues when a single file download fails", async () => { + const saveAsImpl = vi.fn(); + const onFileError = vi.fn(); + const fetchImpl = vi.fn(async (url: RequestInfo | URL) => { + const target = typeof url === "string" ? url : url instanceof URL ? 
url.toString() : url.url; + + if (target.includes("missing")) { + return { + ok: false, + status: 404, + statusText: "Not Found", + body: null, + } as unknown as Response; + } + + return createStreamResponse(["kept"]); + }); + + await downloadFolderAsZip({ + files: [ + { path: "docs/missing.txt", url: "https://example.com/missing.txt" }, + { path: "docs/kept.txt", url: "https://example.com/kept.txt" }, + ], + signal: new AbortController().signal, + archiveName: "partial.zip", + fetchImpl, + saveAsImpl, + showSaveFilePickerImpl: null, + onFileError, + }); + + expect(onFileError).toHaveBeenCalledTimes(1); + const [savedBlob] = saveAsImpl.mock.calls[0] as [Blob, string]; + const unzipped = unzipSync(new Uint8Array(await savedBlob.arrayBuffer())); + + expect(Object.keys(unzipped)).toEqual(["docs/kept.txt"]); + expect(decoder.decode(unzipped["docs/kept.txt"])).toBe("kept"); + }); + + it("aborts the active reader and sink when cancellation happens mid-stream", async () => { + const abortController = new AbortController(); + const cancel = vi.fn(async () => {}); + const releaseLock = vi.fn(); + const write = vi.fn(async () => {}); + const close = vi.fn(async () => {}); + const abort = vi.fn(async () => {}); + let resolveSecondReadStart: (() => void) | null = null; + const secondReadStarted = new Promise((resolve) => { + resolveSecondReadStart = resolve; + }); + const reader = { + read: vi + .fn() + .mockResolvedValueOnce({ done: false, value: encoder.encode("chunk-1") }) + .mockImplementationOnce( + () => + new Promise>((_, reject) => { + resolveSecondReadStart?.(); + abortController.signal.addEventListener( + "abort", + () => reject(createAbortError("Download aborted")), + { once: true }, + ); + }), + ), + cancel, + releaseLock, + }; + const fetchImpl = vi.fn(async () => ({ + ok: true, + status: 200, + statusText: "OK", + body: { + getReader: () => reader, + }, + })) as typeof fetch; + const showSaveFilePickerImpl = vi.fn(async () => ({ + createWritable: async () => + ({ 
+ write, + close, + abort, + }) as unknown as FileSystemWritableFileStream, + })) as unknown as (options?: unknown) => Promise<FileSystemFileHandle>; + + const promise = downloadFolderAsZip({ + files: [{ path: "docs/stream.txt", url: "https://example.com/stream.txt" }], + signal: abortController.signal, + archiveName: "stream.zip", + fetchImpl, + showSaveFilePickerImpl, + }); + + await secondReadStarted; + abortController.abort(); + + await expect(promise).rejects.toMatchObject({ name: "AbortError" }); + expect(cancel).toHaveBeenCalledTimes(1); + expect(releaseLock).toHaveBeenCalledTimes(1); + expect(abort).toHaveBeenCalledTimes(1); + expect(close).not.toHaveBeenCalled(); + }); + + it("treats picker cancellation as an abort without starting any fetches", async () => { + const fetchImpl = vi.fn(); + const showSaveFilePickerImpl = vi.fn(async () => { + throw createAbortError("User cancelled"); + }) as unknown as (options?: unknown) => Promise<FileSystemFileHandle>; + + await expect( + downloadFolderAsZip({ + files: [{ path: "docs/ignored.txt", url: "https://example.com/ignored.txt" }], + signal: new AbortController().signal, + archiveName: "ignored.zip", + fetchImpl: fetchImpl as typeof fetch, + showSaveFilePickerImpl, + }), + ).rejects.toMatchObject({ name: "AbortError" }); + + expect(fetchImpl).not.toHaveBeenCalled(); + }); +}); diff --git a/src/utils/download/folderZipPipeline.ts b/src/utils/download/folderZipPipeline.ts new file mode 100644 index 0000000..aa77b70 --- /dev/null +++ b/src/utils/download/folderZipPipeline.ts @@ -0,0 +1,347 @@ +import { saveAs } from "file-saver"; +import { Zip, ZipDeflate } from "fflate"; + +import { createAbortError, isAbortError } from "@/utils/network/abort"; + +const ZIP_MIME_TYPE = "application/zip"; +const DEFAULT_COMPRESSION_LEVEL = 6; + +interface SaveAsLike { + (data: Blob, filename: string): void; +} + +interface SaveFilePickerOptionsLike { + suggestedName?: string; + types?: Array<{ + description?: string; + accept: Record<string, string[]>; + }>; +} + +type ShowSaveFilePickerLike = 
( + options?: SaveFilePickerOptionsLike, +) => Promise<FileSystemFileHandle>; + +export interface FolderDownloadEntry { + path: string; + url: string; + size?: number; +} + +export interface ZipOutputSink { + write(chunk: Uint8Array): Promise<void>; + close(): Promise<void>; + abort(reason?: unknown): Promise<void>; +} + +export interface DownloadFolderAsZipOptions { + files: FolderDownloadEntry[]; + signal: AbortSignal; + archiveName: string; + outputSink?: ZipOutputSink; + compressionLevel?: number; + onFileComplete?: (processedCount: number, totalFiles: number) => void; + onFileError?: (file: FolderDownloadEntry, error: Error) => void; + fetchImpl?: typeof fetch; + saveAsImpl?: SaveAsLike; + showSaveFilePickerImpl?: ShowSaveFilePickerLike | null; +} + +class BlobZipOutputSink implements ZipOutputSink { + private readonly chunks: Uint8Array[] = []; + private closed = false; + private aborted = false; + + constructor( + private readonly archiveName: string, + private readonly saveAsImpl: SaveAsLike, + ) {} + + async write(chunk: Uint8Array): Promise<void> { + if (this.closed || this.aborted) { + return; + } + + this.chunks.push(chunk); + } + + async close(): Promise<void> { + if (this.closed || this.aborted) { + return; + } + + this.closed = true; + this.saveAsImpl(new Blob(this.chunks, { type: ZIP_MIME_TYPE }), this.archiveName); + this.chunks.length = 0; + } + + async abort(): Promise<void> { + this.aborted = true; + this.chunks.length = 0; + } +} + +class NativeFileZipOutputSink implements ZipOutputSink { + private closed = false; + private aborted = false; + + constructor(private readonly writable: FileSystemWritableFileStream) {} + + async write(chunk: Uint8Array): Promise<void> { + if (this.closed || this.aborted) { + return; + } + + await this.writable.write(chunk); + } + + async close(): Promise<void> { + if (this.closed || this.aborted) { + return; + } + + this.closed = true; + await this.writable.close(); + } + + async abort(reason?: unknown): Promise<void> { + if (this.closed || this.aborted) { + return; + } + + this.aborted = 
true; + + try { + await this.writable.abort(reason); + } catch { + try { + await this.writable.close(); + } catch { + // Ignore close failures during abort cleanup. + } + } + } +} + +class ZipChunkWriter { + private pending: Promise<void> = Promise.resolve(); + + constructor( + private readonly sink: ZipOutputSink, + private readonly signal: AbortSignal, + ) {} + + enqueue(chunk: Uint8Array): Promise<void> { + this.pending = this.pending.then(async () => { + throwIfAborted(this.signal); + await this.sink.write(chunk); + }); + + return this.pending; + } + + async flush(): Promise<void> { + await this.pending; + } +} + +const throwIfAborted = (signal: AbortSignal): void => { + if (signal.aborted) { + throw createAbortError("Download aborted"); + } +}; + +const toError = (error: unknown): Error => { + if (error instanceof Error) { + return error; + } + + return new Error(String(error)); +}; + +const getDefaultFetch = (): typeof fetch => { + if (typeof globalThis.fetch !== "function") { + throw new Error("Fetch API is unavailable"); + } + + return globalThis.fetch.bind(globalThis); +}; + +const getDefaultShowSaveFilePicker = (): ShowSaveFilePickerLike | null => { + const picker = ( + globalThis as typeof globalThis & { + showSaveFilePicker?: ShowSaveFilePickerLike; + } + ).showSaveFilePicker; + + return typeof picker === "function" ? 
picker.bind(globalThis) : null; +}; + +const createZipOutputSink = async ( + archiveName: string, + saveAsImpl: SaveAsLike, + showSaveFilePickerImpl: ShowSaveFilePickerLike | null, +): Promise<ZipOutputSink> => { + if (showSaveFilePickerImpl === null) { + return new BlobZipOutputSink(archiveName, saveAsImpl); + } + + try { + const fileHandle = await showSaveFilePickerImpl({ + suggestedName: archiveName, + types: [ + { + description: "ZIP archive", + accept: { + [ZIP_MIME_TYPE]: [".zip"], + }, + }, + ], + }); + + const writable = await fileHandle.createWritable(); + return new NativeFileZipOutputSink(writable); + } catch (error) { + if (isAbortError(error)) { + throw createAbortError("Download aborted"); + } + + throw error; + } +}; + +export const prepareZipOutputSink = async ({ + archiveName, + saveAsImpl = saveAs, + showSaveFilePickerImpl = getDefaultShowSaveFilePicker(), +}: Pick< + DownloadFolderAsZipOptions, + "archiveName" | "saveAsImpl" | "showSaveFilePickerImpl" +>): Promise<ZipOutputSink> => createZipOutputSink(archiveName, saveAsImpl, showSaveFilePickerImpl); + +const appendFileToZip = async ( + zip: Zip, + writer: ZipChunkWriter, + file: FolderDownloadEntry, + fetchImpl: typeof fetch, + signal: AbortSignal, + compressionLevel: number, +): Promise<void> => { + throwIfAborted(signal); + + const response = await fetchImpl(file.url, { signal }); + + if (!response.ok) { + throw new Error(`下载失败: ${String(response.status)} ${response.statusText}`); + } + + if (response.body === null) { + throw new Error("当前浏览器不支持流式读取下载内容"); + } + + const zipEntry = new ZipDeflate(file.path, { level: compressionLevel }); + zip.add(zipEntry); + + const reader = response.body.getReader(); + + try { + while (true) { + throwIfAborted(signal); + const { done, value } = await reader.read(); + + if (done) { + zipEntry.push(new Uint8Array(0), true); + await writer.flush(); + return; + } + + if (value !== undefined && value.length > 0) { + zipEntry.push(value, false); + await writer.flush(); + } + } + } catch (error) { + 
try { + await reader.cancel(error); + } catch { + // Ignore reader cancellation failures during cleanup. + } + + throw error; + } finally { + reader.releaseLock(); + } +}; + +export const downloadFolderAsZip = async ({ + files, + signal, + archiveName, + outputSink, + compressionLevel = DEFAULT_COMPRESSION_LEVEL, + onFileComplete, + onFileError, + fetchImpl = getDefaultFetch(), + saveAsImpl = saveAs, + showSaveFilePickerImpl = getDefaultShowSaveFilePicker(), +}: DownloadFolderAsZipOptions): Promise<void> => { + throwIfAborted(signal); + + const sink = + outputSink ?? (await createZipOutputSink(archiveName, saveAsImpl, showSaveFilePickerImpl)); + const writer = new ZipChunkWriter(sink, signal); + let zipCallbackError: Error | null = null; + + const zip = new Zip((error, chunk) => { + if (error !== null) { + zipCallbackError = toError(error); + return; + } + + void writer.enqueue(chunk).catch((writerError) => { + zipCallbackError = toError(writerError); + }); + }); + + try { + let processedCount = 0; + + for (const file of files) { + throwIfAborted(signal); + + try { + await appendFileToZip(zip, writer, file, fetchImpl, signal, compressionLevel); + processedCount += 1; + onFileComplete?.(processedCount, files.length); + } catch (error) { + if (isAbortError(error)) { + throw createAbortError("Download aborted"); + } + + onFileError?.(file, toError(error)); + } + + if (zipCallbackError !== null) { + throw zipCallbackError; + } + } + + zip.end(); + await writer.flush(); + + if (zipCallbackError !== null) { + throw zipCallbackError; + } + + await sink.close(); + } catch (error) { + zip.terminate(); + await sink.abort(error); + + if (isAbortError(error)) { + throw createAbortError("Download aborted"); + } + + throw toError(error); + } +}; diff --git a/src/utils/network/abort.ts b/src/utils/network/abort.ts new file mode 100644 index 0000000..810cc3f --- /dev/null +++ b/src/utils/network/abort.ts @@ -0,0 +1,17 @@ +export const createAbortError = (message = "Request aborted"): 
Error => { + const error = new Error(message); + error.name = "AbortError"; + return error; +}; + +export const isAbortError = (error: unknown): error is Error & { code?: string } => { + if (!(error instanceof Error)) { + return false; + } + + return ( + error.name === "AbortError" || + error.name === "CanceledError" || + (typeof error.code === "string" && error.code === "ERR_CANCELED") + ); +}; diff --git a/src/utils/pdf/pdfPreviewHelper.ts b/src/utils/pdf/pdfPreviewHelper.ts index 8cbab89..46e8cae 100644 --- a/src/utils/pdf/pdfPreviewHelper.ts +++ b/src/utils/pdf/pdfPreviewHelper.ts @@ -26,6 +26,8 @@ export interface PDFPreviewOptions { fileName: string; /** 下载 URL */ downloadUrl: string; + /** 服务端代理 URL(可选) */ + serverProxyUrl?: string; /** MUI 主题对象 */ theme: Theme; /** 翻译文本 */ @@ -308,7 +310,7 @@ function createFallbackLink(downloadUrl: string): void { * ``` */ export async function openPDFPreview(options: PDFPreviewOptions): Promise { - const { fileName, downloadUrl, theme, translations, isDev = false } = options; + const { fileName, downloadUrl, serverProxyUrl, theme, translations, isDev = false } = options; // 初始化预览窗口 const newTab = initializePDFWindow(fileName, theme, translations); @@ -322,9 +324,11 @@ export async function openPDFPreview(options: PDFPreviewOptions): Promise const themeColors = extractPDFThemeColors(theme); // 根据环境选择下载 URL - const finalDownloadUrl = isDev - ? downloadUrl - : `/api/github?action=getFileContent&url=${encodeURIComponent(downloadUrl)}`; + const finalDownloadUrl = + serverProxyUrl ?? + (isDev + ? 
downloadUrl + : `/api/github?action=getGitHubAsset&url=${encodeURIComponent(downloadUrl)}`); // 下载并显示 PDF await downloadAndDisplayPDF(newTab, finalDownloadUrl, fileName, themeColors, translations); diff --git a/src/utils/request/requestManager.ts b/src/utils/request/requestManager.ts index b621c8b..7c575a6 100644 --- a/src/utils/request/requestManager.ts +++ b/src/utils/request/requestManager.ts @@ -5,6 +5,7 @@ */ import { logger } from "@/utils"; +import { createAbortError, isAbortError } from "@/utils/network/abort"; /** * 请求选项 @@ -24,6 +25,11 @@ export interface RequestOptions { * 是否记录详细日志 */ verbose?: boolean; + + /** + * 外部中止信号 + */ + signal?: AbortSignal; } /** @@ -39,19 +45,13 @@ export class RequestManager { private pendingRequests = new Map(); private debounceTimers = new Map>(); - private createAbortError(): Error { - const error = new Error("Request aborted"); - error.name = "AbortError"; - return error; - } - private async waitForDebounce(key: string, delay: number, signal: AbortSignal): Promise { if (delay <= 0) { return; } if (signal.aborted) { - throw this.createAbortError(); + throw createAbortError(); } await new Promise((resolve, reject) => { @@ -78,7 +78,7 @@ export class RequestManager { const onAbort = (): void => { finalize(() => { - reject(this.createAbortError()); + reject(createAbortError()); }, onAbort); }; @@ -119,7 +119,7 @@ export class RequestManager { fetcher: (signal: AbortSignal) => Promise, options: RequestOptions = {}, ): Promise { - const { debounce, verbose = false } = options; + const { debounce, verbose = false, signal: externalSignal } = options; // 取消之前的同 key 请求 this.cancel(key); @@ -127,25 +127,36 @@ export class RequestManager { // 创建新的 AbortController const controller = new AbortController(); this.pendingRequests.set(key, controller); + const abortFromExternal = (): void => { + controller.abort(); + }; - // 如果设置了防抖,等待防抖延迟 - if (debounce !== undefined && debounce > 0) { - if (verbose) { - logger.debug(`请求防抖: ${key}, 延迟 
${debounce.toString()}ms`); + if (externalSignal !== undefined) { + if (externalSignal.aborted) { + controller.abort(); + } else { + externalSignal.addEventListener("abort", abortFromExternal, { once: true }); } - - await this.waitForDebounce(key, debounce, controller.signal); } - if (controller.signal.aborted) { - throw this.createAbortError(); - } + try { + // 如果设置了防抖,等待防抖延迟 + if (debounce !== undefined && debounce > 0) { + if (verbose) { + logger.debug(`请求防抖: ${key}, 延迟 ${debounce.toString()}ms`); + } - if (verbose) { - logger.debug(`开始请求: ${key}`); - } + await this.waitForDebounce(key, debounce, controller.signal); + } + + if (controller.signal.aborted) { + throw createAbortError(); + } + + if (verbose) { + logger.debug(`开始请求: ${key}`); + } - try { const result = await fetcher(controller.signal); // 请求成功,清理 @@ -161,16 +172,20 @@ export class RequestManager { this.pendingRequests.delete(key); // 如果是取消错误,不记录日志 - if (error instanceof Error && error.name === "AbortError") { + if (isAbortError(error)) { if (verbose) { logger.debug(`请求已取消: ${key}`); } - throw error; + throw createAbortError(); } // 其他错误正常记录 logger.error(`请求失败: ${key}`, error); throw error; + } finally { + if (externalSignal !== undefined) { + externalSignal.removeEventListener("abort", abortFromExternal); + } } } diff --git a/src/utils/routing/basePath.test.ts b/src/utils/routing/basePath.test.ts new file mode 100644 index 0000000..c4ae584 --- /dev/null +++ b/src/utils/routing/basePath.test.ts @@ -0,0 +1,40 @@ +import { describe, expect, it } from "vite-plus/test"; + +import { + buildAbsoluteAppUrl, + buildAppPath, + getAppBasePath, + normalizeBaseUrl, + stripBasePath, +} from "./basePath"; + +describe("basePath", () => { + it("normalizes configured base urls", () => { + expect(normalizeBaseUrl("repo-viewer")).toBe("/repo-viewer/"); + expect(normalizeBaseUrl("/repo-viewer")).toBe("/repo-viewer/"); + expect(normalizeBaseUrl("/repo-viewer/")).toBe("/repo-viewer/"); + 
expect(normalizeBaseUrl("/")).toBe("/"); + }); + + it("builds app-local paths with base prefix", () => { + expect(getAppBasePath("/repo-viewer/")).toBe("/repo-viewer"); + expect(buildAppPath("", "/repo-viewer/")).toBe("/repo-viewer/"); + expect(buildAppPath("docs/guide", "/repo-viewer/")).toBe("/repo-viewer/docs/guide"); + expect(buildAppPath("/docs/guide", "/repo-viewer/")).toBe("/repo-viewer/docs/guide"); + }); + + it("strips the configured base from window pathname values", () => { + expect(stripBasePath("/repo-viewer/docs/guide", "/repo-viewer/")).toBe("/docs/guide"); + expect(stripBasePath("/repo-viewer", "/repo-viewer/")).toBe("/"); + expect(stripBasePath("/docs/guide", "/repo-viewer/")).toBe("/docs/guide"); + }); + + it("builds absolute app urls from the configured base", () => { + expect( + buildAbsoluteAppUrl("search-index/manifest.json", { + baseUrl: "/repo-viewer/", + origin: "https://example.com", + }), + ).toBe("https://example.com/repo-viewer/search-index/manifest.json"); + }); +}); diff --git a/src/utils/routing/basePath.ts b/src/utils/routing/basePath.ts new file mode 100644 index 0000000..6329a1b --- /dev/null +++ b/src/utils/routing/basePath.ts @@ -0,0 +1,76 @@ +const ROOT_PATH = "/"; + +export function normalizeBaseUrl(baseUrl: string): string { + const trimmed = baseUrl.trim(); + + if (trimmed === "" || trimmed === ROOT_PATH) { + return ROOT_PATH; + } + + const withLeadingSlash = trimmed.startsWith(ROOT_PATH) ? trimmed : `${ROOT_PATH}${trimmed}`; + const collapsed = withLeadingSlash.replace(/\/{2,}/gu, ROOT_PATH); + + return collapsed.endsWith(ROOT_PATH) ? collapsed : `${collapsed}${ROOT_PATH}`; +} + +export function getAppBaseUrl(baseUrl: string = import.meta.env.BASE_URL): string { + return normalizeBaseUrl(baseUrl); +} + +export function getAppBasePath(baseUrl: string = import.meta.env.BASE_URL): string { + const normalizedBaseUrl = getAppBaseUrl(baseUrl); + return normalizedBaseUrl === ROOT_PATH ? 
"" : normalizedBaseUrl.slice(0, -1); +} + +export function stripBasePath( + pathname: string, + baseUrl: string = import.meta.env.BASE_URL, +): string { + const basePath = getAppBasePath(baseUrl); + + if (basePath === "") { + return pathname; + } + + if (pathname === basePath) { + return ROOT_PATH; + } + + if (pathname.startsWith(`${basePath}${ROOT_PATH}`)) { + return pathname.slice(basePath.length); + } + + return pathname; +} + +export function buildAppPath(path = "", baseUrl: string = import.meta.env.BASE_URL): string { + const basePath = getAppBasePath(baseUrl); + const normalizedPath = path.replace(/^\/+/u, ""); + + if (normalizedPath === "") { + return basePath === "" ? ROOT_PATH : `${basePath}${ROOT_PATH}`; + } + + return `${basePath}${ROOT_PATH}${normalizedPath}`; +} + +export function buildAbsoluteAppUrl( + path = "", + options?: { + baseUrl?: string; + origin?: string; + }, +): string { + const appPath = buildAppPath(path, options?.baseUrl); + const origin = options?.origin; + + if (typeof origin === "string" && origin.trim().length > 0) { + return new URL(appPath, origin).toString(); + } + + if (typeof window === "undefined") { + return appPath; + } + + return new URL(appPath, window.location.origin).toString(); +} diff --git a/src/utils/routing/urlManager.ts b/src/utils/routing/urlManager.ts index 5e0b4d4..4f27225 100644 --- a/src/utils/routing/urlManager.ts +++ b/src/utils/routing/urlManager.ts @@ -1,5 +1,6 @@ import { GitHub } from "@/services/github"; import { logger } from "../index"; +import { buildAppPath, stripBasePath } from "./basePath"; /** * 验证路径格式 @@ -25,7 +26,7 @@ function isValidPath(path: string): boolean { export function getPathFromUrl(): string { try { // 首先尝试从路径段获取 - let pathname = window.location.pathname; + let pathname = stripBasePath(window.location.pathname); // 移除开头的斜杠 if (pathname.startsWith("/")) { @@ -133,7 +134,7 @@ function buildUrl(path: string, preview?: string, branch?: string): UrlBuildResu const encodedPath = 
path.length > 0 ? encodeURI(path) : ""; // 基础 URL 是路径 - let url = `/${encodedPath}`; + let url = buildAppPath(encodedPath); const branchValue = branch ?? GitHub.Branch.getCurrentBranch(); const activeBranch = branchValue.trim(); diff --git a/src/vite-env.d.ts b/src/vite-env.d.ts index ce2d5d6..5f3fe11 100644 --- a/src/vite-env.d.ts +++ b/src/vite-env.d.ts @@ -56,6 +56,7 @@ interface ImportMetaEnv { readonly VITE_USE_TOKEN_MODE?: string; readonly VITE_DEVELOPER_MODE?: string; readonly VITE_CONSOLE_LOGGING?: string; + readonly VITE_BASE_PATH?: string; // GitHub仓库变量(双向同步) readonly GITHUB_REPO_OWNER?: string; diff --git a/vite.config.ts b/vite.config.ts index f863cde..b89afb6 100644 --- a/vite.config.ts +++ b/vite.config.ts @@ -118,6 +118,17 @@ const decodeUrl = (url: string | undefined): string => { } }; +const normalizeBaseUrl = (value: string | undefined): string => { + const trimmed = value?.trim() ?? ""; + + if (trimmed === "" || trimmed === "/") { + return "/"; + } + + const withLeadingSlash = trimmed.startsWith("/") ? trimmed : `/${trimmed}`; + return withLeadingSlash.endsWith("/") ? 
withLeadingSlash : `${withLeadingSlash}/`; +}; + const createRequestLoggerMiddleware = (logger: Logger) => ({ onProxyReq(_proxyReq: http.ClientRequest, req: http.IncomingMessage) { const method = req.method || "UNKNOWN"; @@ -263,14 +274,6 @@ const generateBuildArtifacts = async (logger: Logger): Promise => { ); }; -const createBuildArtifactsPlugin = (logger: Logger) => ({ - name: "repo-build-artifacts", - apply: "build" as const, - async buildStart() { - await generateBuildArtifacts(logger); - }, -}); - const createProxyConfig = (requestLogger: ReturnType) => ({ "/github-api": { target: "https://api.github.com", @@ -466,6 +469,14 @@ const createVercelApiHandlerPlugin = (logger: Logger) => ({ }, }); +const createBuildArtifactsPlugin = (logger: Logger) => ({ + name: "repo-build-artifacts", + apply: "build" as const, + async buildStart() { + await generateBuildArtifacts(logger); + }, +}); + const mode = process.env.MODE ?? process.env.NODE_ENV ?? "development"; const env = loadEnv(mode, process.cwd(), ""); const isProdLike = mode === "production" || process.env.NODE_ENV === "production"; @@ -473,10 +484,12 @@ const isProdLike = mode === "production" || process.env.NODE_ENV === "production applyEnvMappingForVite(env, isProdLike); const DEVELOPER_MODE = (env.VITE_DEVELOPER_MODE || env.DEVELOPER_MODE) === "true"; +const APP_BASE_URL = normalizeBaseUrl(env.VITE_BASE_PATH ?? process.env.VITE_BASE_PATH); const logger = createLogger(DEVELOPER_MODE); const requestLogger = createRequestLoggerMiddleware(logger); export default defineConfig({ + base: APP_BASE_URL, lint: { plugins: ["typescript", "unicorn", "react"], ignorePatterns: [ @@ -525,7 +538,7 @@ export default defineConfig({ }, }, test: { - include: ["src/**/*.test.ts"], + include: ["src/**/*.test.ts", "api/**/*.test.ts"], environment: "node", }, run: {