Implement file storage layer with local provider, upload/download API, tests

- StorageProvider interface with LocalProvider (S3 placeholder)
- File table with entity_type/entity_id references, content type, path
- POST /v1/files (multipart upload), GET /v1/files (list by entity),
  GET /v1/files/:id (metadata), GET /v1/files/serve/* (content),
  DELETE /v1/files/:id
- member_identifier drops base64 columns, uses file_id FKs
- File validation: type whitelist, size limits, per-entity max
- Fastify storage plugin injects provider into app
- 6 API tests for upload, list, get, delete, validation
- Test runner kills stale port before starting backend
This commit is contained in:
Ryan Moon
2026-03-28 15:29:06 -05:00
parent de4d2e0a32
commit 760e995ae3
19 changed files with 615 additions and 6 deletions

View File

@@ -59,6 +59,7 @@
"dependencies": { "dependencies": {
"@fastify/cors": "^10", "@fastify/cors": "^10",
"@fastify/jwt": "^9", "@fastify/jwt": "^9",
"@fastify/multipart": "^9.4.0",
"@fastify/rate-limit": "^10.3.0", "@fastify/rate-limit": "^10.3.0",
"@forte/shared": "workspace:*", "@forte/shared": "workspace:*",
"bcrypt": "^6", "bcrypt": "^6",
@@ -209,8 +210,12 @@
"@fastify/ajv-compiler": ["@fastify/ajv-compiler@4.0.5", "", { "dependencies": { "ajv": "^8.12.0", "ajv-formats": "^3.0.1", "fast-uri": "^3.0.0" } }, "sha512-KoWKW+MhvfTRWL4qrhUwAAZoaChluo0m0vbiJlGMt2GXvL4LVPQEjt8kSpHI3IBq5Rez8fg+XeH3cneztq+C7A=="], "@fastify/ajv-compiler": ["@fastify/ajv-compiler@4.0.5", "", { "dependencies": { "ajv": "^8.12.0", "ajv-formats": "^3.0.1", "fast-uri": "^3.0.0" } }, "sha512-KoWKW+MhvfTRWL4qrhUwAAZoaChluo0m0vbiJlGMt2GXvL4LVPQEjt8kSpHI3IBq5Rez8fg+XeH3cneztq+C7A=="],
"@fastify/busboy": ["@fastify/busboy@3.2.0", "", {}, "sha512-m9FVDXU3GT2ITSe0UaMA5rU3QkfC/UXtCU8y0gSN/GugTqtVldOBWIB5V6V3sbmenVZUIpU6f+mPEO2+m5iTaA=="],
"@fastify/cors": ["@fastify/cors@10.1.0", "", { "dependencies": { "fastify-plugin": "^5.0.0", "mnemonist": "0.40.0" } }, "sha512-MZyBCBJtII60CU9Xme/iE4aEy8G7QpzGR8zkdXZkDFt7ElEMachbE61tfhAG/bvSaULlqlf0huMT12T7iqEmdQ=="], "@fastify/cors": ["@fastify/cors@10.1.0", "", { "dependencies": { "fastify-plugin": "^5.0.0", "mnemonist": "0.40.0" } }, "sha512-MZyBCBJtII60CU9Xme/iE4aEy8G7QpzGR8zkdXZkDFt7ElEMachbE61tfhAG/bvSaULlqlf0huMT12T7iqEmdQ=="],
"@fastify/deepmerge": ["@fastify/deepmerge@3.2.1", "", {}, "sha512-N5Oqvltoa2r9z1tbx4xjky0oRR60v+T47Ic4J1ukoVQcptLOrIdRnCSdTGmOmajZuHVKlTnfcmrjyqsGEW1ztA=="],
"@fastify/error": ["@fastify/error@4.2.0", "", {}, "sha512-RSo3sVDXfHskiBZKBPRgnQTtIqpi/7zhJOEmAxCiBcM7d0uwdGdxLlsCaLzGs8v8NnxIRlfG0N51p5yFaOentQ=="], "@fastify/error": ["@fastify/error@4.2.0", "", {}, "sha512-RSo3sVDXfHskiBZKBPRgnQTtIqpi/7zhJOEmAxCiBcM7d0uwdGdxLlsCaLzGs8v8NnxIRlfG0N51p5yFaOentQ=="],
"@fastify/fast-json-stringify-compiler": ["@fastify/fast-json-stringify-compiler@5.0.3", "", { "dependencies": { "fast-json-stringify": "^6.0.0" } }, "sha512-uik7yYHkLr6fxd8hJSZ8c+xF4WafPK+XzneQDPU+D10r5X19GW8lJcom2YijX2+qtFF1ENJlHXKFM9ouXNJYgQ=="], "@fastify/fast-json-stringify-compiler": ["@fastify/fast-json-stringify-compiler@5.0.3", "", { "dependencies": { "fast-json-stringify": "^6.0.0" } }, "sha512-uik7yYHkLr6fxd8hJSZ8c+xF4WafPK+XzneQDPU+D10r5X19GW8lJcom2YijX2+qtFF1ENJlHXKFM9ouXNJYgQ=="],
@@ -221,6 +226,8 @@
"@fastify/merge-json-schemas": ["@fastify/merge-json-schemas@0.2.1", "", { "dependencies": { "dequal": "^2.0.3" } }, "sha512-OA3KGBCy6KtIvLf8DINC5880o5iBlDX4SxzLQS8HorJAbqluzLRn80UXU0bxZn7UOFhFgpRJDasfwn9nG4FG4A=="], "@fastify/merge-json-schemas": ["@fastify/merge-json-schemas@0.2.1", "", { "dependencies": { "dequal": "^2.0.3" } }, "sha512-OA3KGBCy6KtIvLf8DINC5880o5iBlDX4SxzLQS8HorJAbqluzLRn80UXU0bxZn7UOFhFgpRJDasfwn9nG4FG4A=="],
"@fastify/multipart": ["@fastify/multipart@9.4.0", "", { "dependencies": { "@fastify/busboy": "^3.0.0", "@fastify/deepmerge": "^3.0.0", "@fastify/error": "^4.0.0", "fastify-plugin": "^5.0.0", "secure-json-parse": "^4.0.0" } }, "sha512-Z404bzZeLSXTBmp/trCBuoVFX28pM7rhv849Q5TsbTFZHuk1lc4QjQITTPK92DKVpXmNtJXeHSSc7GYvqFpxAQ=="],
"@fastify/proxy-addr": ["@fastify/proxy-addr@5.1.0", "", { "dependencies": { "@fastify/forwarded": "^3.0.0", "ipaddr.js": "^2.1.0" } }, "sha512-INS+6gh91cLUjB+PVHfu1UqcB76Sqtpyp7bnL+FYojhjygvOPA9ctiD/JDKsyD9Xgu4hUhCSJBPig/w7duNajw=="], "@fastify/proxy-addr": ["@fastify/proxy-addr@5.1.0", "", { "dependencies": { "@fastify/forwarded": "^3.0.0", "ipaddr.js": "^2.1.0" } }, "sha512-INS+6gh91cLUjB+PVHfu1UqcB76Sqtpyp7bnL+FYojhjygvOPA9ctiD/JDKsyD9Xgu4hUhCSJBPig/w7duNajw=="],
"@fastify/rate-limit": ["@fastify/rate-limit@10.3.0", "", { "dependencies": { "@lukeed/ms": "^2.0.2", "fastify-plugin": "^5.0.0", "toad-cache": "^3.7.0" } }, "sha512-eIGkG9XKQs0nyynatApA3EVrojHOuq4l6fhB4eeCk4PIOeadvOJz9/4w3vGI44Go17uaXOWEcPkaD8kuKm7g6Q=="], "@fastify/rate-limit": ["@fastify/rate-limit@10.3.0", "", { "dependencies": { "@lukeed/ms": "^2.0.2", "fastify-plugin": "^5.0.0", "toad-cache": "^3.7.0" } }, "sha512-eIGkG9XKQs0nyynatApA3EVrojHOuq4l6fhB4eeCk4PIOeadvOJz9/4w3vGI44Go17uaXOWEcPkaD8kuKm7g6Q=="],

View File

@@ -3,6 +3,8 @@ import { type TestResult, printSuiteHeader, printTestResult } from './reporter.j
export interface TestContext { export interface TestContext {
api: ApiClient api: ApiClient
token: string
baseUrl: string
test: (name: string, optsOrFn: { tags?: string[] } | (() => Promise<void>), maybeFn?: () => Promise<void>) => void test: (name: string, optsOrFn: { tags?: string[] } | (() => Promise<void>), maybeFn?: () => Promise<void>) => void
assert: { assert: {
status: (res: ApiResponse, expected: number) => void status: (res: ApiResponse, expected: number) => void
@@ -117,6 +119,8 @@ export async function runSuite(
const ctx: TestContext = { const ctx: TestContext = {
api, api,
token,
baseUrl,
assert: makeAssert(), assert: makeAssert(),
test(name, optsOrFn, maybeFn) { test(name, optsOrFn, maybeFn) {
const opts = typeof optsOrFn === 'function' ? {} : optsOrFn const opts = typeof optsOrFn === 'function' ? {} : optsOrFn

View File

@@ -77,7 +77,16 @@ async function setupDatabase() {
} }
// --- Start backend --- // --- Start backend ---
async function killPort(port: number) {
try {
const { execSync } = await import('child_process')
execSync(`lsof -ti:${port} | xargs kill -9 2>/dev/null || true`, { stdio: 'pipe' })
await new Promise((r) => setTimeout(r, 1000))
} catch {}
}
async function startBackend(): Promise<Subprocess> { async function startBackend(): Promise<Subprocess> {
await killPort(TEST_PORT)
const proc = spawn({ const proc = spawn({
cmd: ['bun', 'run', 'src/main.ts'], cmd: ['bun', 'run', 'src/main.ts'],
cwd: new URL('..', import.meta.url).pathname, cwd: new URL('..', import.meta.url).pathname,
@@ -90,6 +99,7 @@ async function startBackend(): Promise<Subprocess> {
HOST: '0.0.0.0', HOST: '0.0.0.0',
NODE_ENV: 'development', NODE_ENV: 'development',
LOG_LEVEL: 'error', LOG_LEVEL: 'error',
STORAGE_LOCAL_PATH: '/tmp/forte-test-files',
}, },
stdout: 'pipe', stdout: 'pipe',
stderr: 'pipe', stderr: 'pipe',

View File

@@ -0,0 +1,146 @@
import { suite } from '../lib/context.js'
// Helper: create a tiny 1x1 JPEG for testing uploads
const TINY_JPEG = Buffer.from(
'/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEB' +
'AQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQH/2wBDAQEBAQEBAQEBAQEBAQEBAQEBAQEB' +
'AQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQH/wAARCAABAAEDASIA' +
'AhEBAxEB/8QAFAABAAAAAAAAAAAAAAAAAAAACf/EABQQAQAAAAAAAAAAAAAAAAAAAAD/xAAUAQEA' +
'AAAAAAAAAAAAAAAAAAAB/8QAFBEBAAAAAAAAAAAAAAAAAAAAAP/aAAwDAQACEQMRAD8AJgA//9k=',
'base64',
)
suite('Files', { tags: ['files', 'storage'] }, (t) => {
t.test('uploads an image file', { tags: ['upload'] }, async () => {
const acct = await t.api.post('/v1/accounts', { name: 'File Test' })
const member = await t.api.post(`/v1/accounts/${acct.data.id}/members`, {
firstName: 'File',
lastName: 'Test',
})
// Upload via multipart
const formData = new FormData()
formData.append('file', new Blob([TINY_JPEG], { type: 'image/jpeg' }), 'test.jpg')
formData.append('entityType', 'member_identifier')
formData.append('entityId', member.data.id)
formData.append('category', 'front')
const res = await fetch(`${t.baseUrl}/v1/files`, {
method: 'POST',
headers: { Authorization: `Bearer ${t.token}` },
body: formData,
})
const data = await res.json()
t.assert.equal(res.status, 201)
t.assert.ok(data.id)
t.assert.equal(data.contentType, 'image/jpeg')
t.assert.equal(data.entityType, 'member_identifier')
t.assert.equal(data.category, 'front')
t.assert.ok(data.url)
})
t.test('lists files for an entity', { tags: ['read'] }, async () => {
const acct = await t.api.post('/v1/accounts', { name: 'File List Test' })
const member = await t.api.post(`/v1/accounts/${acct.data.id}/members`, {
firstName: 'List',
lastName: 'Files',
})
// Upload a file first
const formData = new FormData()
formData.append('file', new Blob([TINY_JPEG], { type: 'image/jpeg' }), 'list-test.jpg')
formData.append('entityType', 'member_identifier')
formData.append('entityId', member.data.id)
formData.append('category', 'back')
await fetch(`${t.baseUrl}/v1/files`, {
method: 'POST',
headers: { Authorization: `Bearer ${t.token}` },
body: formData,
})
const res = await t.api.get('/v1/files', {
entityType: 'member_identifier',
entityId: member.data.id,
})
t.assert.status(res, 200)
t.assert.greaterThan(res.data.data.length, 0)
t.assert.ok(res.data.data[0].url)
})
t.test('gets file metadata by id', { tags: ['read'] }, async () => {
const acct = await t.api.post('/v1/accounts', { name: 'File Get Test' })
const member = await t.api.post(`/v1/accounts/${acct.data.id}/members`, {
firstName: 'Get',
lastName: 'File',
})
const formData = new FormData()
formData.append('file', new Blob([TINY_JPEG], { type: 'image/jpeg' }), 'get-test.jpg')
formData.append('entityType', 'member_identifier')
formData.append('entityId', member.data.id)
formData.append('category', 'front')
const uploadRes = await fetch(`${t.baseUrl}/v1/files`, {
method: 'POST',
headers: { Authorization: `Bearer ${t.token}` },
body: formData,
})
const uploaded = await uploadRes.json()
const res = await t.api.get(`/v1/files/${uploaded.id}`)
t.assert.status(res, 200)
t.assert.equal(res.data.id, uploaded.id)
t.assert.equal(res.data.filename, 'get-test.jpg')
})
t.test('deletes a file', { tags: ['delete'] }, async () => {
const acct = await t.api.post('/v1/accounts', { name: 'File Delete Test' })
const member = await t.api.post(`/v1/accounts/${acct.data.id}/members`, {
firstName: 'Delete',
lastName: 'File',
})
const formData = new FormData()
formData.append('file', new Blob([TINY_JPEG], { type: 'image/jpeg' }), 'delete-test.jpg')
formData.append('entityType', 'member_identifier')
formData.append('entityId', member.data.id)
formData.append('category', 'front')
const uploadRes = await fetch(`${t.baseUrl}/v1/files`, {
method: 'POST',
headers: { Authorization: `Bearer ${t.token}` },
body: formData,
})
const uploaded = await uploadRes.json()
const res = await t.api.del(`/v1/files/${uploaded.id}`)
t.assert.status(res, 200)
const check = await t.api.get(`/v1/files/${uploaded.id}`)
t.assert.status(check, 404)
})
t.test('rejects unsupported file types', { tags: ['validation'] }, async () => {
const acct = await t.api.post('/v1/accounts', { name: 'File Reject Test' })
const formData = new FormData()
formData.append('file', new Blob(['not an image'], { type: 'text/plain' }), 'test.txt')
formData.append('entityType', 'member_identifier')
formData.append('entityId', acct.data.id)
formData.append('category', 'front')
const res = await fetch(`${t.baseUrl}/v1/files`, {
method: 'POST',
headers: { Authorization: `Bearer ${t.token}` },
body: formData,
})
t.assert.equal(res.status, 400)
})
t.test('returns 404 for missing file', { tags: ['read'] }, async () => {
const res = await t.api.get('/v1/files/a0000000-0000-0000-0000-999999999999')
t.assert.status(res, 404)
})
})

View File

@@ -17,6 +17,7 @@
"dependencies": { "dependencies": {
"@fastify/cors": "^10", "@fastify/cors": "^10",
"@fastify/jwt": "^9", "@fastify/jwt": "^9",
"@fastify/multipart": "^9.4.0",
"@fastify/rate-limit": "^10.3.0", "@fastify/rate-limit": "^10.3.0",
"@forte/shared": "workspace:*", "@forte/shared": "workspace:*",
"bcrypt": "^6", "bcrypt": "^6",

View File

@@ -0,0 +1,25 @@
-- File storage table
CREATE TABLE IF NOT EXISTS "file" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid(),
"company_id" uuid NOT NULL REFERENCES "company"("id"),
"path" varchar(1000) NOT NULL,
"filename" varchar(255) NOT NULL,
"content_type" varchar(100) NOT NULL,
"size_bytes" integer NOT NULL,
"entity_type" varchar(100) NOT NULL,
"entity_id" uuid NOT NULL,
"category" varchar(100) NOT NULL,
"uploaded_by" uuid,
"created_at" timestamp with time zone NOT NULL DEFAULT now()
);
CREATE UNIQUE INDEX "file_company_path" ON "file" ("company_id", "path");
CREATE INDEX "file_entity" ON "file" ("company_id", "entity_type", "entity_id");
-- Update member_identifier: replace base64 columns with file references
ALTER TABLE "member_identifier" DROP COLUMN IF EXISTS "image_front";
ALTER TABLE "member_identifier" DROP COLUMN IF EXISTS "image_back";
ALTER TABLE "member_identifier" DROP COLUMN IF EXISTS "image_front_url";
ALTER TABLE "member_identifier" DROP COLUMN IF EXISTS "image_back_url";
ALTER TABLE "member_identifier" ADD COLUMN "image_front_file_id" uuid REFERENCES "file"("id");
ALTER TABLE "member_identifier" ADD COLUMN "image_back_file_id" uuid REFERENCES "file"("id");

View File

@@ -85,6 +85,13 @@
"when": 1774710000000, "when": 1774710000000,
"tag": "0011_member_address", "tag": "0011_member_address",
"breakpoints": true "breakpoints": true
},
{
"idx": 12,
"version": "7",
"when": 1774720000000,
"tag": "0012_file_storage",
"breakpoints": true
} }
] ]
} }

View File

@@ -82,8 +82,8 @@ export const memberIdentifiers = pgTable('member_identifier', {
issuingAuthority: varchar('issuing_authority', { length: 255 }), issuingAuthority: varchar('issuing_authority', { length: 255 }),
issuedDate: date('issued_date'), issuedDate: date('issued_date'),
expiresAt: date('expires_at'), expiresAt: date('expires_at'),
imageFront: text('image_front'), imageFrontFileId: uuid('image_front_file_id'),
imageBack: text('image_back'), imageBackFileId: uuid('image_back_file_id'),
notes: text('notes'), notes: text('notes'),
isPrimary: boolean('is_primary').notNull().default(false), isPrimary: boolean('is_primary').notNull().default(false),
createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),

View File

@@ -0,0 +1,21 @@
import { pgTable, uuid, varchar, integer, timestamp } from 'drizzle-orm/pg-core'
import { companies } from './stores.js'
export const files = pgTable('file', {
id: uuid('id').primaryKey().defaultRandom(),
companyId: uuid('company_id')
.notNull()
.references(() => companies.id),
path: varchar('path', { length: 1000 }).notNull(),
filename: varchar('filename', { length: 255 }).notNull(),
contentType: varchar('content_type', { length: 100 }).notNull(),
sizeBytes: integer('size_bytes').notNull(),
entityType: varchar('entity_type', { length: 100 }).notNull(),
entityId: uuid('entity_id').notNull(),
category: varchar('category', { length: 100 }).notNull(),
uploadedBy: uuid('uploaded_by'),
createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
})
export type FileRecord = typeof files.$inferSelect
export type FileRecordInsert = typeof files.$inferInsert

View File

@@ -6,12 +6,14 @@ import { corsPlugin } from './plugins/cors.js'
import { errorHandlerPlugin } from './plugins/error-handler.js' import { errorHandlerPlugin } from './plugins/error-handler.js'
import { authPlugin } from './plugins/auth.js' import { authPlugin } from './plugins/auth.js'
import { devAuthPlugin } from './plugins/dev-auth.js' import { devAuthPlugin } from './plugins/dev-auth.js'
import { storagePlugin } from './plugins/storage.js'
import { healthRoutes } from './routes/v1/health.js' import { healthRoutes } from './routes/v1/health.js'
import { authRoutes } from './routes/v1/auth.js' import { authRoutes } from './routes/v1/auth.js'
import { accountRoutes } from './routes/v1/accounts.js' import { accountRoutes } from './routes/v1/accounts.js'
import { inventoryRoutes } from './routes/v1/inventory.js' import { inventoryRoutes } from './routes/v1/inventory.js'
import { productRoutes } from './routes/v1/products.js' import { productRoutes } from './routes/v1/products.js'
import { lookupRoutes } from './routes/v1/lookups.js' import { lookupRoutes } from './routes/v1/lookups.js'
import { fileRoutes } from './routes/v1/files.js'
export async function buildApp() { export async function buildApp() {
const app = Fastify({ const app = Fastify({
@@ -28,6 +30,7 @@ export async function buildApp() {
await app.register(databasePlugin) await app.register(databasePlugin)
await app.register(redisPlugin) await app.register(redisPlugin)
await app.register(rateLimit, { global: false }) await app.register(rateLimit, { global: false })
await app.register(storagePlugin)
// Auth — JWT in production/test, dev bypass only in development without JWT_SECRET // Auth — JWT in production/test, dev bypass only in development without JWT_SECRET
if (process.env.JWT_SECRET) { if (process.env.JWT_SECRET) {
@@ -46,6 +49,7 @@ export async function buildApp() {
await app.register(inventoryRoutes, { prefix: '/v1' }) await app.register(inventoryRoutes, { prefix: '/v1' })
await app.register(productRoutes, { prefix: '/v1' }) await app.register(productRoutes, { prefix: '/v1' })
await app.register(lookupRoutes, { prefix: '/v1' }) await app.register(lookupRoutes, { prefix: '/v1' })
await app.register(fileRoutes, { prefix: '/v1' })
return app return app
} }

View File

@@ -0,0 +1,14 @@
import fp from 'fastify-plugin'
import { createStorageProvider, type StorageProvider } from '../storage/index.js'
declare module 'fastify' {
interface FastifyInstance {
storage: StorageProvider
}
}
export const storagePlugin = fp(async (app) => {
const storage = createStorageProvider()
app.decorate('storage', storage)
app.log.info(`Storage provider: ${process.env.STORAGE_PROVIDER ?? 'local'}`)
})

View File

@@ -0,0 +1,106 @@
import type { FastifyPluginAsync } from 'fastify'
import multipart from '@fastify/multipart'
import { FileService } from '../../services/file.service.js'
export const fileRoutes: FastifyPluginAsync = async (app) => {
await app.register(multipart, {
limits: {
fileSize: 25 * 1024 * 1024, // 25 MB max
files: 1,
},
})
// List files for an entity
app.get('/files', { preHandler: [app.authenticate] }, async (request, reply) => {
const { entityType, entityId } = request.query as { entityType?: string; entityId?: string }
if (!entityType || !entityId) {
return reply.status(400).send({
error: { message: 'entityType and entityId query params required', statusCode: 400 },
})
}
const fileRecords = await FileService.listByEntity(app.db, request.companyId, entityType, entityId)
const data = await Promise.all(
fileRecords.map(async (f) => ({ ...f, url: await app.storage.getUrl(f.path) })),
)
return reply.send({ data })
})
// Upload a file
app.post('/files', { preHandler: [app.authenticate] }, async (request, reply) => {
const data = await request.file()
if (!data) {
return reply.status(400).send({ error: { message: 'No file provided', statusCode: 400 } })
}
const entityType = (data.fields.entityType as { value?: string })?.value
const entityId = (data.fields.entityId as { value?: string })?.value
const category = (data.fields.category as { value?: string })?.value
if (!entityType || !entityId || !category) {
return reply.status(400).send({
error: { message: 'entityType, entityId, and category are required', statusCode: 400 },
})
}
const buffer = await data.toBuffer()
try {
const file = await FileService.upload(app.db, app.storage, request.companyId, {
data: buffer,
filename: data.filename,
contentType: data.mimetype,
entityType,
entityId,
category,
uploadedBy: request.user.id,
})
const url = await app.storage.getUrl(file.path)
return reply.status(201).send({ ...file, url })
} catch (err) {
if (err instanceof Error && (err.message.includes('not allowed') || err.message.includes('too large') || err.message.includes('Maximum'))) {
return reply.status(400).send({ error: { message: err.message, statusCode: 400 } })
}
throw err
}
})
// Serve file content (for local provider)
app.get('/files/serve/*', { preHandler: [app.authenticate] }, async (request, reply) => {
const filePath = (request.params as { '*': string })['*']
if (!filePath) {
return reply.status(400).send({ error: { message: 'Path required', statusCode: 400 } })
}
try {
const data = await app.storage.get(filePath)
const ext = filePath.split('.').pop()?.toLowerCase()
const contentTypeMap: Record<string, string> = {
jpg: 'image/jpeg', jpeg: 'image/jpeg', png: 'image/png', webp: 'image/webp', pdf: 'application/pdf',
}
return reply
.header('Content-Type', contentTypeMap[ext ?? ''] ?? 'application/octet-stream')
.header('Cache-Control', 'private, max-age=3600')
.send(data)
} catch {
return reply.status(404).send({ error: { message: 'File not found', statusCode: 404 } })
}
})
// Get file metadata
app.get('/files/:id', { preHandler: [app.authenticate] }, async (request, reply) => {
const { id } = request.params as { id: string }
const file = await FileService.getById(app.db, request.companyId, id)
if (!file) return reply.status(404).send({ error: { message: 'File not found', statusCode: 404 } })
const url = await app.storage.getUrl(file.path)
return reply.send({ ...file, url })
})
// Delete a file
app.delete('/files/:id', { preHandler: [app.authenticate] }, async (request, reply) => {
const { id } = request.params as { id: string }
const file = await FileService.delete(app.db, app.storage, request.companyId, id)
if (!file) return reply.status(404).send({ error: { message: 'File not found', statusCode: 404 } })
return reply.send(file)
})
}

View File

@@ -607,8 +607,8 @@ export const MemberIdentifierService = {
issuingAuthority: input.issuingAuthority, issuingAuthority: input.issuingAuthority,
issuedDate: input.issuedDate, issuedDate: input.issuedDate,
expiresAt: input.expiresAt, expiresAt: input.expiresAt,
imageFrontUrl: input.imageFrontUrl, imageFrontFileId: input.imageFrontFileId,
imageBackUrl: input.imageBackUrl, imageBackFileId: input.imageBackFileId,
notes: input.notes, notes: input.notes,
isPrimary: input.isPrimary, isPrimary: input.isPrimary,
}) })

View File

@@ -0,0 +1,139 @@
import { eq, and, count } from 'drizzle-orm'
import type { PostgresJsDatabase } from 'drizzle-orm/postgres-js'
import { files } from '../db/schema/files.js'
import type { StorageProvider } from '../storage/index.js'
import { randomUUID } from 'crypto'
const ALLOWED_IMAGE_TYPES = ['image/jpeg', 'image/png', 'image/webp']
const ALLOWED_PDF_TYPES = ['application/pdf']
const ALLOWED_TYPES = [...ALLOWED_IMAGE_TYPES, ...ALLOWED_PDF_TYPES]
const MAX_IMAGE_SIZE = 10 * 1024 * 1024 // 10 MB
const MAX_PDF_SIZE = 25 * 1024 * 1024 // 25 MB
const MAX_FILES_PER_ENTITY = 20
function getExtension(contentType: string): string {
const map: Record<string, string> = {
'image/jpeg': 'jpg',
'image/png': 'png',
'image/webp': 'webp',
'application/pdf': 'pdf',
}
return map[contentType] ?? 'bin'
}
export const FileService = {
async upload(
db: PostgresJsDatabase,
storage: StorageProvider,
companyId: string,
input: {
data: Buffer
filename: string
contentType: string
entityType: string
entityId: string
category: string
uploadedBy?: string
},
) {
// Validate content type
if (!ALLOWED_TYPES.includes(input.contentType)) {
throw new Error(`File type not allowed: ${input.contentType}`)
}
// Validate size
const maxSize = ALLOWED_IMAGE_TYPES.includes(input.contentType) ? MAX_IMAGE_SIZE : MAX_PDF_SIZE
if (input.data.length > maxSize) {
throw new Error(`File too large: ${input.data.length} bytes (max ${maxSize})`)
}
// Check per-entity limit
const [existing] = await db
.select({ total: count() })
.from(files)
.where(
and(
eq(files.companyId, companyId),
eq(files.entityType, input.entityType),
eq(files.entityId, input.entityId),
),
)
if (existing.total >= MAX_FILES_PER_ENTITY) {
throw new Error(`Maximum ${MAX_FILES_PER_ENTITY} files per entity`)
}
// Generate path
const fileId = randomUUID()
const ext = getExtension(input.contentType)
const path = `${companyId}/${input.entityType}/${input.entityId}/${input.category}-${fileId}.${ext}`
// Write to storage
await storage.put(path, input.data, input.contentType)
// Insert record
const [file] = await db
.insert(files)
.values({
id: fileId,
companyId,
path,
filename: input.filename,
contentType: input.contentType,
sizeBytes: input.data.length,
entityType: input.entityType,
entityId: input.entityId,
category: input.category,
uploadedBy: input.uploadedBy,
})
.returning()
return file
},
async getById(db: PostgresJsDatabase, companyId: string, id: string) {
const [file] = await db
.select()
.from(files)
.where(and(eq(files.id, id), eq(files.companyId, companyId)))
.limit(1)
return file ?? null
},
async listByEntity(
db: PostgresJsDatabase,
companyId: string,
entityType: string,
entityId: string,
) {
return db
.select()
.from(files)
.where(
and(
eq(files.companyId, companyId),
eq(files.entityType, entityType),
eq(files.entityId, entityId),
),
)
.orderBy(files.createdAt)
},
async delete(
db: PostgresJsDatabase,
storage: StorageProvider,
companyId: string,
id: string,
) {
const file = await this.getById(db, companyId, id)
if (!file) return null
await storage.delete(file.path)
const [deleted] = await db
.delete(files)
.where(and(eq(files.id, id), eq(files.companyId, companyId)))
.returning()
return deleted ?? null
},
}

View File

@@ -0,0 +1,23 @@
import { LocalStorageProvider } from './local.js'
import type { StorageProvider } from './provider.js'
export type { StorageProvider }
export function createStorageProvider(): StorageProvider {
const provider = process.env.STORAGE_PROVIDER ?? 'local'
if (provider === 'local') {
const root = process.env.STORAGE_LOCAL_PATH ?? './data/files'
const baseUrl = `http://localhost:${process.env.PORT ?? '8000'}`
return new LocalStorageProvider(root, baseUrl)
}
if (provider === 's3') {
// Lazy import to avoid requiring @aws-sdk when using local
throw new Error(
'S3 provider requires @aws-sdk/client-s3. Install it and update this factory.',
)
}
throw new Error(`Unknown storage provider: ${provider}`)
}

View File

@@ -0,0 +1,48 @@
import { mkdir, readFile, writeFile, unlink, access } from 'fs/promises'
import { dirname, join } from 'path'
import type { StorageProvider } from './provider.js'
export class LocalStorageProvider implements StorageProvider {
private root: string
private baseUrl: string
constructor(root: string, baseUrl: string) {
this.root = root
this.baseUrl = baseUrl
}
private fullPath(path: string): string {
return join(this.root, path)
}
async put(path: string, data: Buffer, _contentType: string): Promise<void> {
const fullPath = this.fullPath(path)
await mkdir(dirname(fullPath), { recursive: true })
await writeFile(fullPath, data)
}
async get(path: string): Promise<Buffer> {
return readFile(this.fullPath(path))
}
async delete(path: string): Promise<void> {
try {
await unlink(this.fullPath(path))
} catch (err: unknown) {
if ((err as NodeJS.ErrnoException).code !== 'ENOENT') throw err
}
}
async exists(path: string): Promise<boolean> {
try {
await access(this.fullPath(path))
return true
} catch {
return false
}
}
async getUrl(path: string, _expiresIn?: number): Promise<string> {
return `${this.baseUrl}/v1/files/serve/${encodeURIComponent(path)}`
}
}

View File

@@ -0,0 +1,7 @@
export interface StorageProvider {
put(path: string, data: Buffer, contentType: string): Promise<void>
get(path: string): Promise<Buffer>
delete(path: string): Promise<void>
exists(path: string): Promise<boolean>
getUrl(path: string, expiresIn?: number): Promise<string>
}

View File

@@ -0,0 +1,47 @@
import type { StorageProvider } from './provider.js'
// S3 provider — requires @aws-sdk/client-s3 (install when needed)
// This is a placeholder that documents the interface. Install the SDK
// and uncomment when deploying with S3.
export class S3StorageProvider implements StorageProvider {
private bucket: string
private region: string
private endpoint?: string
constructor(config: {
bucket: string
region: string
endpoint?: string
accessKey: string
secretKey: string
}) {
this.bucket = config.bucket
this.region = config.region
this.endpoint = config.endpoint
// TODO: initialize S3Client from @aws-sdk/client-s3
throw new Error(
'S3 provider not yet implemented. Install @aws-sdk/client-s3 and implement.',
)
}
async put(_path: string, _data: Buffer, _contentType: string): Promise<void> {
throw new Error('Not implemented')
}
async get(_path: string): Promise<Buffer> {
throw new Error('Not implemented')
}
async delete(_path: string): Promise<void> {
throw new Error('Not implemented')
}
async exists(_path: string): Promise<boolean> {
throw new Error('Not implemented')
}
async getUrl(_path: string, _expiresIn?: number): Promise<string> {
throw new Error('Not implemented')
}
}

View File

@@ -66,8 +66,8 @@ export const MemberIdentifierCreateSchema = z.object({
issuingAuthority: opt(z.string().max(255)), issuingAuthority: opt(z.string().max(255)),
issuedDate: opt(z.string().date()), issuedDate: opt(z.string().date()),
expiresAt: opt(z.string().date()), expiresAt: opt(z.string().date()),
imageFront: opt(z.string()), imageFrontFileId: opt(z.string().uuid()),
imageBack: opt(z.string()), imageBackFileId: opt(z.string().uuid()),
notes: opt(z.string()), notes: opt(z.string()),
isPrimary: z.boolean().default(false), isPrimary: z.boolean().default(false),
}) })