Implement file storage layer with local provider, upload/download API, tests
- StorageProvider interface with LocalProvider (S3 placeholder) - File table with entity_type/entity_id references, content type, path - POST /v1/files (multipart upload), GET /v1/files (list by entity), GET /v1/files/:id (metadata), GET /v1/files/serve/* (content), DELETE /v1/files/:id - member_identifier drops base64 columns, uses file_id FKs - File validation: type whitelist, size limits, per-entity max - Fastify storage plugin injects provider into app - 6 API tests for upload, list, get, delete, validation - Test runner kills stale port before starting backend
This commit is contained in:
25
packages/backend/src/db/migrations/0012_file_storage.sql
Normal file
25
packages/backend/src/db/migrations/0012_file_storage.sql
Normal file
@@ -0,0 +1,25 @@
|
||||
-- File storage table.
-- One row per stored object: "path" is the provider-relative storage key,
-- and (entity_type, entity_id) is a polymorphic owner reference enforced at
-- the application layer (no FK on entity_id by design).
CREATE TABLE IF NOT EXISTS "file" (
  "id" uuid PRIMARY KEY DEFAULT gen_random_uuid(),
  "company_id" uuid NOT NULL REFERENCES "company"("id"),
  "path" varchar(1000) NOT NULL,
  "filename" varchar(255) NOT NULL,
  "content_type" varchar(100) NOT NULL,
  "size_bytes" integer NOT NULL,
  "entity_type" varchar(100) NOT NULL,
  "entity_id" uuid NOT NULL,
  "category" varchar(100) NOT NULL,
  "uploaded_by" uuid,
  "created_at" timestamp with time zone NOT NULL DEFAULT now()
);

-- IF NOT EXISTS added to match the guarded CREATE TABLE above, so a
-- partially-applied migration can be re-run without failing on the index.
CREATE UNIQUE INDEX IF NOT EXISTS "file_company_path" ON "file" ("company_id", "path");
CREATE INDEX IF NOT EXISTS "file_entity" ON "file" ("company_id", "entity_type", "entity_id");

-- Update member_identifier: replace base64 columns with file references
ALTER TABLE "member_identifier" DROP COLUMN IF EXISTS "image_front";
ALTER TABLE "member_identifier" DROP COLUMN IF EXISTS "image_back";
ALTER TABLE "member_identifier" DROP COLUMN IF EXISTS "image_front_url";
ALTER TABLE "member_identifier" DROP COLUMN IF EXISTS "image_back_url";
-- IF NOT EXISTS keeps the ADD COLUMNs idempotent, consistent with the
-- DROP COLUMN IF EXISTS statements above.
ALTER TABLE "member_identifier" ADD COLUMN IF NOT EXISTS "image_front_file_id" uuid REFERENCES "file"("id");
ALTER TABLE "member_identifier" ADD COLUMN IF NOT EXISTS "image_back_file_id" uuid REFERENCES "file"("id");
|
||||
@@ -85,6 +85,13 @@
|
||||
"when": 1774710000000,
|
||||
"tag": "0011_member_address",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 12,
|
||||
"version": "7",
|
||||
"when": 1774720000000,
|
||||
"tag": "0012_file_storage",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -82,8 +82,8 @@ export const memberIdentifiers = pgTable('member_identifier', {
|
||||
issuingAuthority: varchar('issuing_authority', { length: 255 }),
|
||||
issuedDate: date('issued_date'),
|
||||
expiresAt: date('expires_at'),
|
||||
imageFront: text('image_front'),
|
||||
imageBack: text('image_back'),
|
||||
imageFrontFileId: uuid('image_front_file_id'),
|
||||
imageBackFileId: uuid('image_back_file_id'),
|
||||
notes: text('notes'),
|
||||
isPrimary: boolean('is_primary').notNull().default(false),
|
||||
createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
|
||||
|
||||
21
packages/backend/src/db/schema/files.ts
Normal file
21
packages/backend/src/db/schema/files.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
import { pgTable, uuid, varchar, integer, timestamp } from 'drizzle-orm/pg-core'
import { companies } from './stores.js'

// Drizzle schema for the "file" table (migration 0012_file_storage).
// One row per stored object: its storage key plus a polymorphic
// (entityType, entityId) reference to the owning record.
export const files = pgTable('file', {
  id: uuid('id').primaryKey().defaultRandom(),
  // Tenant scope — every file belongs to exactly one company.
  companyId: uuid('company_id')
    .notNull()
    .references(() => companies.id),
  // Provider-relative storage key, built by FileService as
  // "<companyId>/<entityType>/<entityId>/<category>-<fileId>.<ext>".
  path: varchar('path', { length: 1000 }).notNull(),
  // Original filename as supplied by the uploading client.
  filename: varchar('filename', { length: 255 }).notNull(),
  contentType: varchar('content_type', { length: 100 }).notNull(),
  sizeBytes: integer('size_bytes').notNull(),
  // Polymorphic owner reference: no FK on entityId; validated in the app layer.
  entityType: varchar('entity_type', { length: 100 }).notNull(),
  entityId: uuid('entity_id').notNull(),
  category: varchar('category', { length: 100 }).notNull(),
  // Uploader's user id; nullable — presumably for system-generated files. TODO confirm.
  uploadedBy: uuid('uploaded_by'),
  createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
})

export type FileRecord = typeof files.$inferSelect
export type FileRecordInsert = typeof files.$inferInsert
||||
@@ -6,12 +6,14 @@ import { corsPlugin } from './plugins/cors.js'
|
||||
import { errorHandlerPlugin } from './plugins/error-handler.js'
|
||||
import { authPlugin } from './plugins/auth.js'
|
||||
import { devAuthPlugin } from './plugins/dev-auth.js'
|
||||
import { storagePlugin } from './plugins/storage.js'
|
||||
import { healthRoutes } from './routes/v1/health.js'
|
||||
import { authRoutes } from './routes/v1/auth.js'
|
||||
import { accountRoutes } from './routes/v1/accounts.js'
|
||||
import { inventoryRoutes } from './routes/v1/inventory.js'
|
||||
import { productRoutes } from './routes/v1/products.js'
|
||||
import { lookupRoutes } from './routes/v1/lookups.js'
|
||||
import { fileRoutes } from './routes/v1/files.js'
|
||||
|
||||
export async function buildApp() {
|
||||
const app = Fastify({
|
||||
@@ -28,6 +30,7 @@ export async function buildApp() {
|
||||
await app.register(databasePlugin)
|
||||
await app.register(redisPlugin)
|
||||
await app.register(rateLimit, { global: false })
|
||||
await app.register(storagePlugin)
|
||||
|
||||
// Auth — JWT in production/test, dev bypass only in development without JWT_SECRET
|
||||
if (process.env.JWT_SECRET) {
|
||||
@@ -46,6 +49,7 @@ export async function buildApp() {
|
||||
await app.register(inventoryRoutes, { prefix: '/v1' })
|
||||
await app.register(productRoutes, { prefix: '/v1' })
|
||||
await app.register(lookupRoutes, { prefix: '/v1' })
|
||||
await app.register(fileRoutes, { prefix: '/v1' })
|
||||
|
||||
return app
|
||||
}
|
||||
|
||||
14
packages/backend/src/plugins/storage.ts
Normal file
14
packages/backend/src/plugins/storage.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
import fp from 'fastify-plugin'
|
||||
import { createStorageProvider, type StorageProvider } from '../storage/index.js'
|
||||
|
||||
declare module 'fastify' {
|
||||
interface FastifyInstance {
|
||||
storage: StorageProvider
|
||||
}
|
||||
}
|
||||
|
||||
export const storagePlugin = fp(async (app) => {
|
||||
const storage = createStorageProvider()
|
||||
app.decorate('storage', storage)
|
||||
app.log.info(`Storage provider: ${process.env.STORAGE_PROVIDER ?? 'local'}`)
|
||||
})
|
||||
106
packages/backend/src/routes/v1/files.ts
Normal file
106
packages/backend/src/routes/v1/files.ts
Normal file
@@ -0,0 +1,106 @@
|
||||
import type { FastifyPluginAsync } from 'fastify'
|
||||
import multipart from '@fastify/multipart'
|
||||
import { FileService } from '../../services/file.service.js'
|
||||
|
||||
export const fileRoutes: FastifyPluginAsync = async (app) => {
|
||||
await app.register(multipart, {
|
||||
limits: {
|
||||
fileSize: 25 * 1024 * 1024, // 25 MB max
|
||||
files: 1,
|
||||
},
|
||||
})
|
||||
|
||||
// List files for an entity
|
||||
app.get('/files', { preHandler: [app.authenticate] }, async (request, reply) => {
|
||||
const { entityType, entityId } = request.query as { entityType?: string; entityId?: string }
|
||||
if (!entityType || !entityId) {
|
||||
return reply.status(400).send({
|
||||
error: { message: 'entityType and entityId query params required', statusCode: 400 },
|
||||
})
|
||||
}
|
||||
|
||||
const fileRecords = await FileService.listByEntity(app.db, request.companyId, entityType, entityId)
|
||||
const data = await Promise.all(
|
||||
fileRecords.map(async (f) => ({ ...f, url: await app.storage.getUrl(f.path) })),
|
||||
)
|
||||
return reply.send({ data })
|
||||
})
|
||||
|
||||
// Upload a file
|
||||
app.post('/files', { preHandler: [app.authenticate] }, async (request, reply) => {
|
||||
const data = await request.file()
|
||||
if (!data) {
|
||||
return reply.status(400).send({ error: { message: 'No file provided', statusCode: 400 } })
|
||||
}
|
||||
|
||||
const entityType = (data.fields.entityType as { value?: string })?.value
|
||||
const entityId = (data.fields.entityId as { value?: string })?.value
|
||||
const category = (data.fields.category as { value?: string })?.value
|
||||
|
||||
if (!entityType || !entityId || !category) {
|
||||
return reply.status(400).send({
|
||||
error: { message: 'entityType, entityId, and category are required', statusCode: 400 },
|
||||
})
|
||||
}
|
||||
|
||||
const buffer = await data.toBuffer()
|
||||
|
||||
try {
|
||||
const file = await FileService.upload(app.db, app.storage, request.companyId, {
|
||||
data: buffer,
|
||||
filename: data.filename,
|
||||
contentType: data.mimetype,
|
||||
entityType,
|
||||
entityId,
|
||||
category,
|
||||
uploadedBy: request.user.id,
|
||||
})
|
||||
const url = await app.storage.getUrl(file.path)
|
||||
return reply.status(201).send({ ...file, url })
|
||||
} catch (err) {
|
||||
if (err instanceof Error && (err.message.includes('not allowed') || err.message.includes('too large') || err.message.includes('Maximum'))) {
|
||||
return reply.status(400).send({ error: { message: err.message, statusCode: 400 } })
|
||||
}
|
||||
throw err
|
||||
}
|
||||
})
|
||||
|
||||
// Serve file content (for local provider)
|
||||
app.get('/files/serve/*', { preHandler: [app.authenticate] }, async (request, reply) => {
|
||||
const filePath = (request.params as { '*': string })['*']
|
||||
if (!filePath) {
|
||||
return reply.status(400).send({ error: { message: 'Path required', statusCode: 400 } })
|
||||
}
|
||||
|
||||
try {
|
||||
const data = await app.storage.get(filePath)
|
||||
const ext = filePath.split('.').pop()?.toLowerCase()
|
||||
const contentTypeMap: Record<string, string> = {
|
||||
jpg: 'image/jpeg', jpeg: 'image/jpeg', png: 'image/png', webp: 'image/webp', pdf: 'application/pdf',
|
||||
}
|
||||
return reply
|
||||
.header('Content-Type', contentTypeMap[ext ?? ''] ?? 'application/octet-stream')
|
||||
.header('Cache-Control', 'private, max-age=3600')
|
||||
.send(data)
|
||||
} catch {
|
||||
return reply.status(404).send({ error: { message: 'File not found', statusCode: 404 } })
|
||||
}
|
||||
})
|
||||
|
||||
// Get file metadata
|
||||
app.get('/files/:id', { preHandler: [app.authenticate] }, async (request, reply) => {
|
||||
const { id } = request.params as { id: string }
|
||||
const file = await FileService.getById(app.db, request.companyId, id)
|
||||
if (!file) return reply.status(404).send({ error: { message: 'File not found', statusCode: 404 } })
|
||||
const url = await app.storage.getUrl(file.path)
|
||||
return reply.send({ ...file, url })
|
||||
})
|
||||
|
||||
// Delete a file
|
||||
app.delete('/files/:id', { preHandler: [app.authenticate] }, async (request, reply) => {
|
||||
const { id } = request.params as { id: string }
|
||||
const file = await FileService.delete(app.db, app.storage, request.companyId, id)
|
||||
if (!file) return reply.status(404).send({ error: { message: 'File not found', statusCode: 404 } })
|
||||
return reply.send(file)
|
||||
})
|
||||
}
|
||||
@@ -607,8 +607,8 @@ export const MemberIdentifierService = {
|
||||
issuingAuthority: input.issuingAuthority,
|
||||
issuedDate: input.issuedDate,
|
||||
expiresAt: input.expiresAt,
|
||||
imageFrontUrl: input.imageFrontUrl,
|
||||
imageBackUrl: input.imageBackUrl,
|
||||
imageFrontFileId: input.imageFrontFileId,
|
||||
imageBackFileId: input.imageBackFileId,
|
||||
notes: input.notes,
|
||||
isPrimary: input.isPrimary,
|
||||
})
|
||||
|
||||
139
packages/backend/src/services/file.service.ts
Normal file
139
packages/backend/src/services/file.service.ts
Normal file
@@ -0,0 +1,139 @@
|
||||
import { eq, and, count } from 'drizzle-orm'
|
||||
import type { PostgresJsDatabase } from 'drizzle-orm/postgres-js'
|
||||
import { files } from '../db/schema/files.js'
|
||||
import type { StorageProvider } from '../storage/index.js'
|
||||
import { randomUUID } from 'crypto'
|
||||
|
||||
const ALLOWED_IMAGE_TYPES = ['image/jpeg', 'image/png', 'image/webp']
|
||||
const ALLOWED_PDF_TYPES = ['application/pdf']
|
||||
const ALLOWED_TYPES = [...ALLOWED_IMAGE_TYPES, ...ALLOWED_PDF_TYPES]
|
||||
const MAX_IMAGE_SIZE = 10 * 1024 * 1024 // 10 MB
|
||||
const MAX_PDF_SIZE = 25 * 1024 * 1024 // 25 MB
|
||||
const MAX_FILES_PER_ENTITY = 20
|
||||
|
||||
function getExtension(contentType: string): string {
|
||||
const map: Record<string, string> = {
|
||||
'image/jpeg': 'jpg',
|
||||
'image/png': 'png',
|
||||
'image/webp': 'webp',
|
||||
'application/pdf': 'pdf',
|
||||
}
|
||||
return map[contentType] ?? 'bin'
|
||||
}
|
||||
|
||||
export const FileService = {
|
||||
async upload(
|
||||
db: PostgresJsDatabase,
|
||||
storage: StorageProvider,
|
||||
companyId: string,
|
||||
input: {
|
||||
data: Buffer
|
||||
filename: string
|
||||
contentType: string
|
||||
entityType: string
|
||||
entityId: string
|
||||
category: string
|
||||
uploadedBy?: string
|
||||
},
|
||||
) {
|
||||
// Validate content type
|
||||
if (!ALLOWED_TYPES.includes(input.contentType)) {
|
||||
throw new Error(`File type not allowed: ${input.contentType}`)
|
||||
}
|
||||
|
||||
// Validate size
|
||||
const maxSize = ALLOWED_IMAGE_TYPES.includes(input.contentType) ? MAX_IMAGE_SIZE : MAX_PDF_SIZE
|
||||
if (input.data.length > maxSize) {
|
||||
throw new Error(`File too large: ${input.data.length} bytes (max ${maxSize})`)
|
||||
}
|
||||
|
||||
// Check per-entity limit
|
||||
const [existing] = await db
|
||||
.select({ total: count() })
|
||||
.from(files)
|
||||
.where(
|
||||
and(
|
||||
eq(files.companyId, companyId),
|
||||
eq(files.entityType, input.entityType),
|
||||
eq(files.entityId, input.entityId),
|
||||
),
|
||||
)
|
||||
if (existing.total >= MAX_FILES_PER_ENTITY) {
|
||||
throw new Error(`Maximum ${MAX_FILES_PER_ENTITY} files per entity`)
|
||||
}
|
||||
|
||||
// Generate path
|
||||
const fileId = randomUUID()
|
||||
const ext = getExtension(input.contentType)
|
||||
const path = `${companyId}/${input.entityType}/${input.entityId}/${input.category}-${fileId}.${ext}`
|
||||
|
||||
// Write to storage
|
||||
await storage.put(path, input.data, input.contentType)
|
||||
|
||||
// Insert record
|
||||
const [file] = await db
|
||||
.insert(files)
|
||||
.values({
|
||||
id: fileId,
|
||||
companyId,
|
||||
path,
|
||||
filename: input.filename,
|
||||
contentType: input.contentType,
|
||||
sizeBytes: input.data.length,
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
category: input.category,
|
||||
uploadedBy: input.uploadedBy,
|
||||
})
|
||||
.returning()
|
||||
|
||||
return file
|
||||
},
|
||||
|
||||
async getById(db: PostgresJsDatabase, companyId: string, id: string) {
|
||||
const [file] = await db
|
||||
.select()
|
||||
.from(files)
|
||||
.where(and(eq(files.id, id), eq(files.companyId, companyId)))
|
||||
.limit(1)
|
||||
return file ?? null
|
||||
},
|
||||
|
||||
async listByEntity(
|
||||
db: PostgresJsDatabase,
|
||||
companyId: string,
|
||||
entityType: string,
|
||||
entityId: string,
|
||||
) {
|
||||
return db
|
||||
.select()
|
||||
.from(files)
|
||||
.where(
|
||||
and(
|
||||
eq(files.companyId, companyId),
|
||||
eq(files.entityType, entityType),
|
||||
eq(files.entityId, entityId),
|
||||
),
|
||||
)
|
||||
.orderBy(files.createdAt)
|
||||
},
|
||||
|
||||
async delete(
|
||||
db: PostgresJsDatabase,
|
||||
storage: StorageProvider,
|
||||
companyId: string,
|
||||
id: string,
|
||||
) {
|
||||
const file = await this.getById(db, companyId, id)
|
||||
if (!file) return null
|
||||
|
||||
await storage.delete(file.path)
|
||||
|
||||
const [deleted] = await db
|
||||
.delete(files)
|
||||
.where(and(eq(files.id, id), eq(files.companyId, companyId)))
|
||||
.returning()
|
||||
|
||||
return deleted ?? null
|
||||
},
|
||||
}
|
||||
23
packages/backend/src/storage/index.ts
Normal file
23
packages/backend/src/storage/index.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import { LocalStorageProvider } from './local.js'
|
||||
import type { StorageProvider } from './provider.js'
|
||||
|
||||
export type { StorageProvider }
|
||||
|
||||
export function createStorageProvider(): StorageProvider {
|
||||
const provider = process.env.STORAGE_PROVIDER ?? 'local'
|
||||
|
||||
if (provider === 'local') {
|
||||
const root = process.env.STORAGE_LOCAL_PATH ?? './data/files'
|
||||
const baseUrl = `http://localhost:${process.env.PORT ?? '8000'}`
|
||||
return new LocalStorageProvider(root, baseUrl)
|
||||
}
|
||||
|
||||
if (provider === 's3') {
|
||||
// Lazy import to avoid requiring @aws-sdk when using local
|
||||
throw new Error(
|
||||
'S3 provider requires @aws-sdk/client-s3. Install it and update this factory.',
|
||||
)
|
||||
}
|
||||
|
||||
throw new Error(`Unknown storage provider: ${provider}`)
|
||||
}
|
||||
48
packages/backend/src/storage/local.ts
Normal file
48
packages/backend/src/storage/local.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import { access, mkdir, readFile, unlink, writeFile } from 'fs/promises'
import { dirname, join, resolve, sep } from 'path'
import type { StorageProvider } from './provider.js'
|
||||
|
||||
export class LocalStorageProvider implements StorageProvider {
|
||||
private root: string
|
||||
private baseUrl: string
|
||||
|
||||
constructor(root: string, baseUrl: string) {
|
||||
this.root = root
|
||||
this.baseUrl = baseUrl
|
||||
}
|
||||
|
||||
private fullPath(path: string): string {
|
||||
return join(this.root, path)
|
||||
}
|
||||
|
||||
async put(path: string, data: Buffer, _contentType: string): Promise<void> {
|
||||
const fullPath = this.fullPath(path)
|
||||
await mkdir(dirname(fullPath), { recursive: true })
|
||||
await writeFile(fullPath, data)
|
||||
}
|
||||
|
||||
async get(path: string): Promise<Buffer> {
|
||||
return readFile(this.fullPath(path))
|
||||
}
|
||||
|
||||
async delete(path: string): Promise<void> {
|
||||
try {
|
||||
await unlink(this.fullPath(path))
|
||||
} catch (err: unknown) {
|
||||
if ((err as NodeJS.ErrnoException).code !== 'ENOENT') throw err
|
||||
}
|
||||
}
|
||||
|
||||
async exists(path: string): Promise<boolean> {
|
||||
try {
|
||||
await access(this.fullPath(path))
|
||||
return true
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
async getUrl(path: string, _expiresIn?: number): Promise<string> {
|
||||
return `${this.baseUrl}/v1/files/serve/${encodeURIComponent(path)}`
|
||||
}
|
||||
}
|
||||
7
packages/backend/src/storage/provider.ts
Normal file
7
packages/backend/src/storage/provider.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
/**
 * Abstraction over blob storage backends (local filesystem now, S3 later).
 * Paths are provider-relative keys, e.g. "<companyId>/<entityType>/<entityId>/...".
 */
export interface StorageProvider {
  /** Writes `data` at `path`; the local provider creates intermediate directories. */
  put(path: string, data: Buffer, contentType: string): Promise<void>
  /** Reads the full content at `path`; rejects if the key does not exist. */
  get(path: string): Promise<Buffer>
  /** Removes the object at `path`; the local provider ignores a missing file. */
  delete(path: string): Promise<void>
  /** Resolves true if an object exists at `path`. */
  exists(path: string): Promise<boolean>
  /** Returns a URL clients can fetch the content from. `expiresIn` is for signed URLs; unused by the local provider. */
  getUrl(path: string, expiresIn?: number): Promise<string>
}
|
||||
47
packages/backend/src/storage/s3.ts
Normal file
47
packages/backend/src/storage/s3.ts
Normal file
@@ -0,0 +1,47 @@
|
||||
import type { StorageProvider } from './provider.js'
|
||||
|
||||
// S3 provider — requires @aws-sdk/client-s3 (install when needed)
|
||||
// This is a placeholder that documents the interface. Install the SDK
|
||||
// and uncomment when deploying with S3.
|
||||
|
||||
export class S3StorageProvider implements StorageProvider {
|
||||
private bucket: string
|
||||
private region: string
|
||||
private endpoint?: string
|
||||
|
||||
constructor(config: {
|
||||
bucket: string
|
||||
region: string
|
||||
endpoint?: string
|
||||
accessKey: string
|
||||
secretKey: string
|
||||
}) {
|
||||
this.bucket = config.bucket
|
||||
this.region = config.region
|
||||
this.endpoint = config.endpoint
|
||||
// TODO: initialize S3Client from @aws-sdk/client-s3
|
||||
throw new Error(
|
||||
'S3 provider not yet implemented. Install @aws-sdk/client-s3 and implement.',
|
||||
)
|
||||
}
|
||||
|
||||
async put(_path: string, _data: Buffer, _contentType: string): Promise<void> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async get(_path: string): Promise<Buffer> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async delete(_path: string): Promise<void> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async exists(_path: string): Promise<boolean> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async getUrl(_path: string, _expiresIn?: number): Promise<string> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user