Implement file storage layer with local provider, upload/download API, tests

- StorageProvider interface with LocalProvider (S3 placeholder)
- File table with entity_type/entity_id references, content type, path
- POST /v1/files (multipart upload), GET /v1/files (list by entity),
  GET /v1/files/:id (metadata), GET /v1/files/serve/* (content),
  DELETE /v1/files/:id
- member_identifier drops base64 columns, uses file_id FKs
- File validation: type whitelist, size limits, per-entity max
- Fastify storage plugin injects provider into app
- 6 API tests for upload, list, get, delete, validation
- Test runner kills any stale process holding the port before starting backend
This commit is contained in:
Ryan Moon
2026-03-28 15:29:06 -05:00
parent de4d2e0a32
commit 760e995ae3
19 changed files with 615 additions and 6 deletions

View File

@@ -607,8 +607,8 @@ export const MemberIdentifierService = {
issuingAuthority: input.issuingAuthority,
issuedDate: input.issuedDate,
expiresAt: input.expiresAt,
imageFrontUrl: input.imageFrontUrl,
imageBackUrl: input.imageBackUrl,
imageFrontFileId: input.imageFrontFileId,
imageBackFileId: input.imageBackFileId,
notes: input.notes,
isPrimary: input.isPrimary,
})

View File

@@ -0,0 +1,139 @@
import { eq, and, count } from 'drizzle-orm'
import type { PostgresJsDatabase } from 'drizzle-orm/postgres-js'
import { files } from '../db/schema/files.js'
import type { StorageProvider } from '../storage/index.js'
import { randomUUID } from 'crypto'
// Content types accepted for upload, grouped by family so each family can
// carry its own size limit.
const ALLOWED_IMAGE_TYPES = ['image/jpeg', 'image/png', 'image/webp']
const ALLOWED_PDF_TYPES = ['application/pdf']
// Flat whitelist checked on every upload.
const ALLOWED_TYPES = [...ALLOWED_IMAGE_TYPES, ...ALLOWED_PDF_TYPES]
const MAX_IMAGE_SIZE = 10 * 1024 * 1024 // 10 MB
const MAX_PDF_SIZE = 25 * 1024 * 1024 // 25 MB
// Cap on files attached to a single (entityType, entityId) pair per company.
const MAX_FILES_PER_ENTITY = 20
/**
 * Map a whitelisted MIME content type to the file extension used when
 * building storage paths. Unrecognized types fall back to the generic 'bin'.
 */
function getExtension(contentType: string): string {
  switch (contentType) {
    case 'image/jpeg':
      return 'jpg'
    case 'image/png':
      return 'png'
    case 'image/webp':
      return 'webp'
    case 'application/pdf':
      return 'pdf'
    default:
      return 'bin'
  }
}
// Storage keys embed companyId / entityType / entityId / category, all of
// which arrive from the API layer; restrict them to a safe character set so
// values like '..' or 'a/b' cannot traverse outside the intended storage
// prefix (the local filesystem provider maps keys to paths directly).
const SAFE_PATH_SEGMENT = /^[A-Za-z0-9][A-Za-z0-9._-]*$/

// Throws when a caller-supplied value is not safe to embed in a storage key.
function assertSafePathSegment(label: string, value: string): void {
  if (!SAFE_PATH_SEGMENT.test(value)) {
    throw new Error(`Invalid ${label}: ${value}`)
  }
}

export const FileService = {
  /**
   * Validate and store an uploaded file.
   *
   * Checks the content type against the whitelist, enforces the per-family
   * size limit and the per-entity file cap, writes the bytes to the storage
   * provider under a company-scoped key, then inserts the metadata row.
   *
   * @returns the inserted file row.
   * @throws Error when the content type is not allowed, the payload exceeds
   *         its family's size limit, the entity already holds
   *         MAX_FILES_PER_ENTITY files, or a path segment contains unsafe
   *         characters.
   */
  async upload(
    db: PostgresJsDatabase,
    storage: StorageProvider,
    companyId: string,
    input: {
      data: Buffer
      filename: string
      contentType: string
      entityType: string
      entityId: string
      category: string
      uploadedBy?: string
    },
  ) {
    // Validate content type against the whitelist.
    if (!ALLOWED_TYPES.includes(input.contentType)) {
      throw new Error(`File type not allowed: ${input.contentType}`)
    }
    // Size limit depends on the type family; anything past the image check
    // must be a PDF because ALLOWED_TYPES only contains those two families.
    const maxSize = ALLOWED_IMAGE_TYPES.includes(input.contentType) ? MAX_IMAGE_SIZE : MAX_PDF_SIZE
    if (input.data.length > maxSize) {
      throw new Error(`File too large: ${input.data.length} bytes (max ${maxSize})`)
    }
    // Reject path-unsafe segments before they reach the storage key.
    assertSafePathSegment('companyId', companyId)
    assertSafePathSegment('entityType', input.entityType)
    assertSafePathSegment('entityId', input.entityId)
    assertSafePathSegment('category', input.category)
    // Enforce the per-entity cap. NOTE(review): this count-then-insert is
    // not atomic; concurrent uploads can briefly exceed the cap.
    const [existing] = await db
      .select({ total: count() })
      .from(files)
      .where(
        and(
          eq(files.companyId, companyId),
          eq(files.entityType, input.entityType),
          eq(files.entityId, input.entityId),
        ),
      )
    if ((existing?.total ?? 0) >= MAX_FILES_PER_ENTITY) {
      throw new Error(`Maximum ${MAX_FILES_PER_ENTITY} files per entity`)
    }
    // The generated UUID doubles as the row id and the storage-key suffix,
    // so the key is unique even when filenames collide.
    const fileId = randomUUID()
    const ext = getExtension(input.contentType)
    const path = `${companyId}/${input.entityType}/${input.entityId}/${input.category}-${fileId}.${ext}`
    // Write bytes first so a storage failure leaves no orphan DB row.
    await storage.put(path, input.data, input.contentType)
    const [file] = await db
      .insert(files)
      .values({
        id: fileId,
        companyId,
        path,
        filename: input.filename,
        contentType: input.contentType,
        sizeBytes: input.data.length,
        entityType: input.entityType,
        entityId: input.entityId,
        category: input.category,
        uploadedBy: input.uploadedBy,
      })
      .returning()
    return file
  },

  /**
   * Fetch one file row by id, scoped to the company.
   * @returns the row, or null when it does not exist or belongs elsewhere.
   */
  async getById(db: PostgresJsDatabase, companyId: string, id: string) {
    const [file] = await db
      .select()
      .from(files)
      .where(and(eq(files.id, id), eq(files.companyId, companyId)))
      .limit(1)
    return file ?? null
  },

  /**
   * List all file rows attached to one entity, oldest first (createdAt).
   */
  async listByEntity(
    db: PostgresJsDatabase,
    companyId: string,
    entityType: string,
    entityId: string,
  ) {
    return db
      .select()
      .from(files)
      .where(
        and(
          eq(files.companyId, companyId),
          eq(files.entityType, entityType),
          eq(files.entityId, entityId),
        ),
      )
      .orderBy(files.createdAt)
  },

  /**
   * Delete a file's stored bytes and its metadata row.
   *
   * Storage deletion happens before the row delete, so a storage failure
   * keeps the row (retryable); a row-delete failure may leave the row
   * pointing at a missing object.
   *
   * @returns the deleted row, or null when no matching file exists.
   */
  async delete(
    db: PostgresJsDatabase,
    storage: StorageProvider,
    companyId: string,
    id: string,
  ) {
    const file = await this.getById(db, companyId, id)
    if (!file) return null
    await storage.delete(file.path)
    const [deleted] = await db
      .delete(files)
      .where(and(eq(files.id, id), eq(files.companyId, companyId)))
      .returning()
    return deleted ?? null
  },
}