Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
77 changes: 64 additions & 13 deletions core/plugins/built-in/vite/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,28 +20,37 @@ const STATIC_MAX_AGE = 31536000
/** Extensions that carry a content hash in their filename (immutable) */
const HASHED_EXT = /\.[0-9a-f]{8,}\.\w+$/

/** Content types eligible for pre-compressed serving */
const COMPRESSIBLE_TYPES = new Set(['.js', '.css', '.html', '.svg', '.json', '.xml', '.txt', '.map'])

/**
* Recursively collect all files under `dir` as relative paths (e.g. "/assets/app.abc123.js").
* Runs once at startup — in production the build output never changes.
*/
function collectFiles(dir: string, prefix = ''): Map<string, string> {
const map = new Map<string, string>()
try {
const entries = readdirSync(dir, { withFileTypes: true })
for (const entry of entries) {
const rel = prefix + '/' + entry.name
if (entry.isDirectory()) {
for (const [k, v] of collectFiles(join(dir, entry.name), rel)) {
map.set(k, v)
}
} else if (entry.isFile()) {
map.set(rel, join(dir, entry.name))
if (!existsSync(dir)) return map

const entries = readdirSync(dir, { withFileTypes: true })
for (const entry of entries) {
const rel = prefix + '/' + entry.name
if (entry.isDirectory()) {
for (const [k, v] of collectFiles(join(dir, entry.name), rel)) {
map.set(k, v)
}
} else if (entry.isFile()) {
map.set(rel, join(dir, entry.name))
}
} catch {}
}
return map
}

/**
 * Get the file extension (including the leading dot) from a URL path or filename.
 *
 * Only dots inside the final path segment count, so a versioned directory like
 * "/v1.2/readme" yields "" (the original `lastIndexOf('.')` over the whole path
 * returned the bogus ".2/readme"). A leading-dot-only name ("/.gitignore") is
 * treated as having no extension, matching Node's `path.extname` convention.
 *
 * @param path URL pathname or bare filename.
 * @returns Extension with dot (e.g. ".js"), or "" when there is none.
 */
function getExtension(path: string): string {
  // Isolate the final segment so directory dots can't leak into the result
  const slashIdx = path.lastIndexOf('/')
  const base = slashIdx === -1 ? path : path.slice(slashIdx + 1)
  const dotIdx = base.lastIndexOf('.')
  // dotIdx <= 0 covers both "no dot" (-1) and a hidden-file leading dot (0)
  return dotIdx <= 0 ? '' : base.slice(dotIdx)
}

/** Create static file handler with full in-memory cache */
function createStaticFallback() {
// Discover base directory once
Expand All @@ -60,6 +69,16 @@ function createStaticFallback() {
// Bun.file() handle cache — avoids re-creating handles on repeated requests
const fileCache = new Map<string, ReturnType<typeof Bun.file>>()

// Pre-build set of paths that have .gz counterparts for fast lookup
const gzMap = new Map<string, string>()
for (const [relPath, absPath] of fileMap) {
if (relPath.endsWith('.gz')) continue
const gzAbsPath = absPath + '.gz'
if (fileMap.has(relPath + '.gz') || existsSync(gzAbsPath)) {
gzMap.set(relPath, gzAbsPath)
}
}

return (c: { request?: Request }) => {
const req = c.request
if (!req) return
Expand Down Expand Up @@ -96,6 +115,32 @@ function createStaticFallback() {
// O(1) lookup in pre-scanned file map
const absolutePath = fileMap.get(pathname)
if (absolutePath) {
// Check for pre-compressed .gz version
const acceptEncoding = req.headers.get('accept-encoding') || ''
const ext = getExtension(pathname)
const gzPath = gzMap.get(pathname)

if (gzPath && COMPRESSIBLE_TYPES.has(ext) && acceptEncoding.includes('gzip')) {
let gzFile = fileCache.get(pathname + '.gz')
if (!gzFile) {
gzFile = Bun.file(gzPath)
fileCache.set(pathname + '.gz', gzFile)
}

const originalFile = Bun.file(absolutePath)
const headers: Record<string, string> = {
'Content-Encoding': 'gzip',
'Content-Type': originalFile.type,
'Vary': 'Accept-Encoding',
}

if (HASHED_EXT.test(pathname)) {
headers['Cache-Control'] = `public, max-age=${STATIC_MAX_AGE}, immutable`
}

return new Response(gzFile, { headers })
}

let file = fileCache.get(pathname)
if (!file) {
file = Bun.file(absolutePath)
Expand All @@ -115,8 +160,13 @@ function createStaticFallback() {
}

// SPA fallback: serve index.html for unmatched routes
// Use no-cache so the browser always revalidates (picks up new deploys)
if (indexFile) {
return indexFile
return new Response(indexFile, {
headers: {
'Cache-Control': 'no-cache'
}
})
}
}
}
Expand All @@ -143,7 +193,8 @@ async function proxyToVite(ctx: RequestContext): Promise<void> {
})

ctx.handled = true
ctx.response = new Response(await response.arrayBuffer(), {
// Stream the proxy response instead of buffering it entirely in memory
ctx.response = new Response(response.body, {
status: response.status,
statusText: response.statusText,
headers: response.headers
Expand Down
73 changes: 55 additions & 18 deletions core/server/live/FileUploadManager.ts
Original file line number Diff line number Diff line change
@@ -1,35 +1,72 @@
import { writeFile, mkdir, unlink } from 'fs/promises'
import { existsSync } from 'fs'
import { join, extname } from 'path'
import type {
ActiveUpload,
FileUploadStartMessage,
import type {
ActiveUpload,
FileUploadStartMessage,
FileUploadChunkMessage,
FileUploadCompleteMessage,
FileUploadProgressResponse,
FileUploadCompleteResponse
} from '@core/types/types'

/**
* Sanitize a filename to only contain safe characters.
* Strips path separators, null bytes, and non-ASCII control characters.
* Replaces spaces and special characters with underscores.
*/
/**
 * Reduce an arbitrary client-supplied filename to a safe ASCII identifier.
 *
 * Strips control characters, path separators, and parent-directory references;
 * converts whitespace and shell/URL-problematic punctuation to underscores;
 * collapses underscore runs and refuses hidden-file leading dots.
 *
 * @param name Raw filename from the upload message.
 * @returns Sanitized name, or "upload" when nothing safe remains.
 */
function sanitizeFilename(name: string): string {
  const cleaned = name
    .replace(/[\x00-\x1f\x7f]/g, '')    // drop null bytes / control chars
    .replace(/[/\\]/g, '')              // drop path separators
    .replace(/\.\./g, '')               // drop parent-directory references
    .replace(/[<>:"|?*#{}%~&]/g, '_')   // neutralize URL/shell punctuation
    .replace(/\s+/g, '_')               // whitespace → underscore
    .replace(/_+/g, '_')                // collapse underscore runs
    .replace(/^\.+/, '')                // no hidden files
  return cleaned.length > 0 ? cleaned : 'upload'
}

export class FileUploadManager {
private activeUploads = new Map<string, ActiveUpload>()
private readonly maxUploadSize = 500 * 1024 * 1024 // 500MB max (aceita qualquer arquivo)
private readonly maxUploadSize = 500 * 1024 * 1024 // 500MB max
private readonly chunkTimeout = 30000 // 30 seconds timeout per chunk
private readonly allowedTypes: string[] = [] // Array vazio = aceita todos os tipos de arquivo
private readonly allowedTypes: string[] = [] // Empty array = accepts all file types
private cleanupTimer: ReturnType<typeof setInterval> | null = null

constructor() {
  // Sweep for stale uploads every 5 minutes.
  // NOTE(review): the scraped diff left BOTH the old bare `setInterval(...)` and
  // the new tracked assignment in place, which would register two timers and
  // leak the untracked one. Only the tracked registration is kept.
  this.cleanupTimer = setInterval(() => this.cleanupStaleUploads(), 5 * 60 * 1000)
  // In Node/Bun the interval handle exposes unref(); calling it lets the
  // process exit even while the sweep timer is pending.
  if (this.cleanupTimer && typeof this.cleanupTimer === 'object' && 'unref' in this.cleanupTimer) {
    this.cleanupTimer.unref()
  }
}

/** Stop the background cleanup timer (useful for tests and graceful shutdown) */
dispose(): void {
if (this.cleanupTimer) {
clearInterval(this.cleanupTimer)
this.cleanupTimer = null
}
}

async startUpload(message: FileUploadStartMessage): Promise<{ success: boolean; error?: string }> {
try {
const { uploadId, componentId, filename, fileType, fileSize, chunkSize = 64 * 1024 } = message

// Validate file size (sem restrição de tipo)
// Validate file size
if (fileSize > this.maxUploadSize) {
throw new Error(`File too large: ${fileSize} bytes. Max: ${this.maxUploadSize} bytes`)
}

if (fileSize <= 0) {
throw new Error('File size must be positive')
}

// Check if upload already exists
if (this.activeUploads.has(uploadId)) {
throw new Error(`Upload ${uploadId} already in progress`)
Expand All @@ -42,7 +79,7 @@ export class FileUploadManager {
const upload: ActiveUpload = {
uploadId,
componentId,
filename,
filename: sanitizeFilename(filename),
fileType,
fileSize,
totalChunks,
Expand All @@ -57,7 +94,7 @@ export class FileUploadManager {
console.log('📤 Upload started:', {
uploadId,
componentId,
filename,
filename: upload.filename,
fileType,
fileSize,
totalChunks
Expand Down Expand Up @@ -138,13 +175,13 @@ export class FileUploadManager {
private async finalizeUpload(upload: ActiveUpload): Promise<void> {
try {
console.log(`✅ Upload completed: ${upload.uploadId}`)

// Assemble file from chunks
const fileUrl = await this.assembleFile(upload)

// Cleanup
this.activeUploads.delete(upload.uploadId)

} catch (error: any) {
console.error(`❌ Upload finalization failed for ${upload.uploadId}:`, error.message)
throw error
Expand All @@ -170,7 +207,6 @@ export class FileUploadManager {

console.log(`✅ Upload validation passed: ${uploadId} (${upload.bytesReceived} bytes)`)


// Assemble file from chunks
const fileUrl = await this.assembleFile(upload)

Expand All @@ -189,7 +225,7 @@ export class FileUploadManager {

} catch (error: any) {
console.error(`❌ Upload completion failed for ${message.uploadId}:`, error.message)

return {
type: 'FILE_UPLOAD_COMPLETE',
componentId: '',
Expand All @@ -209,11 +245,12 @@ export class FileUploadManager {
await mkdir(uploadsDir, { recursive: true })
}

// Generate unique filename
// Generate unique filename with sanitized base name
const timestamp = Date.now()
const extension = extname(upload.filename)
const baseName = upload.filename.replace(extension, '')
const safeFilename = `${baseName}_${timestamp}${extension}`
const baseName = upload.filename.slice(0, -extension.length || undefined)
const safeBaseName = sanitizeFilename(baseName)
const safeFilename = `${safeBaseName}_${timestamp}${extension}`
const filePath = join(uploadsDir, safeFilename)

// Assemble chunks in order
Expand Down Expand Up @@ -249,7 +286,7 @@ export class FileUploadManager {

for (const [uploadId, upload] of this.activeUploads) {
const timeSinceLastChunk = now - upload.lastChunkTime

if (timeSinceLastChunk > this.chunkTimeout * 2) {
staleUploads.push(uploadId)
}
Expand Down
Loading
Loading