Implement file storage layer with local provider, upload/download API, tests
- StorageProvider interface with LocalProvider (S3 placeholder) - File table with entity_type/entity_id references, content type, path - POST /v1/files (multipart upload), GET /v1/files (list by entity), GET /v1/files/:id (metadata), GET /v1/files/serve/* (content), DELETE /v1/files/:id - member_identifier drops base64 columns, uses file_id FKs - File validation: type whitelist, size limits, per-entity max - Fastify storage plugin injects provider into app - 6 API tests for upload, list, get, delete, validation - Test runner kills stale port before starting backend
This commit is contained in:
@@ -3,6 +3,8 @@ import { type TestResult, printSuiteHeader, printTestResult } from './reporter.j
|
||||
|
||||
export interface TestContext {
|
||||
api: ApiClient
|
||||
token: string
|
||||
baseUrl: string
|
||||
test: (name: string, optsOrFn: { tags?: string[] } | (() => Promise<void>), maybeFn?: () => Promise<void>) => void
|
||||
assert: {
|
||||
status: (res: ApiResponse, expected: number) => void
|
||||
@@ -117,6 +119,8 @@ export async function runSuite(
|
||||
|
||||
const ctx: TestContext = {
|
||||
api,
|
||||
token,
|
||||
baseUrl,
|
||||
assert: makeAssert(),
|
||||
test(name, optsOrFn, maybeFn) {
|
||||
const opts = typeof optsOrFn === 'function' ? {} : optsOrFn
|
||||
|
||||
@@ -77,7 +77,16 @@ async function setupDatabase() {
|
||||
}
|
||||
|
||||
// --- Start backend ---
|
||||
async function killPort(port: number) {
|
||||
try {
|
||||
const { execSync } = await import('child_process')
|
||||
execSync(`lsof -ti:${port} | xargs kill -9 2>/dev/null || true`, { stdio: 'pipe' })
|
||||
await new Promise((r) => setTimeout(r, 1000))
|
||||
} catch {}
|
||||
}
|
||||
|
||||
async function startBackend(): Promise<Subprocess> {
|
||||
await killPort(TEST_PORT)
|
||||
const proc = spawn({
|
||||
cmd: ['bun', 'run', 'src/main.ts'],
|
||||
cwd: new URL('..', import.meta.url).pathname,
|
||||
@@ -90,6 +99,7 @@ async function startBackend(): Promise<Subprocess> {
|
||||
HOST: '0.0.0.0',
|
||||
NODE_ENV: 'development',
|
||||
LOG_LEVEL: 'error',
|
||||
STORAGE_LOCAL_PATH: '/tmp/forte-test-files',
|
||||
},
|
||||
stdout: 'pipe',
|
||||
stderr: 'pipe',
|
||||
|
||||
146
packages/backend/api-tests/suites/files.ts
Normal file
146
packages/backend/api-tests/suites/files.ts
Normal file
@@ -0,0 +1,146 @@
|
||||
import { suite } from '../lib/context.js'
|
||||
|
||||
// Helper: create a tiny 1x1 JPEG for testing uploads
|
||||
const TINY_JPEG = Buffer.from(
|
||||
'/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEB' +
|
||||
'AQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQH/2wBDAQEBAQEBAQEBAQEBAQEBAQEBAQEB' +
|
||||
'AQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQH/wAARCAABAAEDASIA' +
|
||||
'AhEBAxEB/8QAFAABAAAAAAAAAAAAAAAAAAAACf/EABQQAQAAAAAAAAAAAAAAAAAAAAD/xAAUAQEA' +
|
||||
'AAAAAAAAAAAAAAAAAAAB/8QAFBEBAAAAAAAAAAAAAAAAAAAAAP/aAAwDAQACEQMRAD8AJgA//9k=',
|
||||
'base64',
|
||||
)
|
||||
|
||||
suite('Files', { tags: ['files', 'storage'] }, (t) => {
|
||||
t.test('uploads an image file', { tags: ['upload'] }, async () => {
|
||||
const acct = await t.api.post('/v1/accounts', { name: 'File Test' })
|
||||
const member = await t.api.post(`/v1/accounts/${acct.data.id}/members`, {
|
||||
firstName: 'File',
|
||||
lastName: 'Test',
|
||||
})
|
||||
|
||||
// Upload via multipart
|
||||
const formData = new FormData()
|
||||
formData.append('file', new Blob([TINY_JPEG], { type: 'image/jpeg' }), 'test.jpg')
|
||||
formData.append('entityType', 'member_identifier')
|
||||
formData.append('entityId', member.data.id)
|
||||
formData.append('category', 'front')
|
||||
|
||||
const res = await fetch(`${t.baseUrl}/v1/files`, {
|
||||
method: 'POST',
|
||||
headers: { Authorization: `Bearer ${t.token}` },
|
||||
body: formData,
|
||||
})
|
||||
const data = await res.json()
|
||||
|
||||
t.assert.equal(res.status, 201)
|
||||
t.assert.ok(data.id)
|
||||
t.assert.equal(data.contentType, 'image/jpeg')
|
||||
t.assert.equal(data.entityType, 'member_identifier')
|
||||
t.assert.equal(data.category, 'front')
|
||||
t.assert.ok(data.url)
|
||||
})
|
||||
|
||||
t.test('lists files for an entity', { tags: ['read'] }, async () => {
|
||||
const acct = await t.api.post('/v1/accounts', { name: 'File List Test' })
|
||||
const member = await t.api.post(`/v1/accounts/${acct.data.id}/members`, {
|
||||
firstName: 'List',
|
||||
lastName: 'Files',
|
||||
})
|
||||
|
||||
// Upload a file first
|
||||
const formData = new FormData()
|
||||
formData.append('file', new Blob([TINY_JPEG], { type: 'image/jpeg' }), 'list-test.jpg')
|
||||
formData.append('entityType', 'member_identifier')
|
||||
formData.append('entityId', member.data.id)
|
||||
formData.append('category', 'back')
|
||||
|
||||
await fetch(`${t.baseUrl}/v1/files`, {
|
||||
method: 'POST',
|
||||
headers: { Authorization: `Bearer ${t.token}` },
|
||||
body: formData,
|
||||
})
|
||||
|
||||
const res = await t.api.get('/v1/files', {
|
||||
entityType: 'member_identifier',
|
||||
entityId: member.data.id,
|
||||
})
|
||||
t.assert.status(res, 200)
|
||||
t.assert.greaterThan(res.data.data.length, 0)
|
||||
t.assert.ok(res.data.data[0].url)
|
||||
})
|
||||
|
||||
t.test('gets file metadata by id', { tags: ['read'] }, async () => {
|
||||
const acct = await t.api.post('/v1/accounts', { name: 'File Get Test' })
|
||||
const member = await t.api.post(`/v1/accounts/${acct.data.id}/members`, {
|
||||
firstName: 'Get',
|
||||
lastName: 'File',
|
||||
})
|
||||
|
||||
const formData = new FormData()
|
||||
formData.append('file', new Blob([TINY_JPEG], { type: 'image/jpeg' }), 'get-test.jpg')
|
||||
formData.append('entityType', 'member_identifier')
|
||||
formData.append('entityId', member.data.id)
|
||||
formData.append('category', 'front')
|
||||
|
||||
const uploadRes = await fetch(`${t.baseUrl}/v1/files`, {
|
||||
method: 'POST',
|
||||
headers: { Authorization: `Bearer ${t.token}` },
|
||||
body: formData,
|
||||
})
|
||||
const uploaded = await uploadRes.json()
|
||||
|
||||
const res = await t.api.get(`/v1/files/${uploaded.id}`)
|
||||
t.assert.status(res, 200)
|
||||
t.assert.equal(res.data.id, uploaded.id)
|
||||
t.assert.equal(res.data.filename, 'get-test.jpg')
|
||||
})
|
||||
|
||||
t.test('deletes a file', { tags: ['delete'] }, async () => {
|
||||
const acct = await t.api.post('/v1/accounts', { name: 'File Delete Test' })
|
||||
const member = await t.api.post(`/v1/accounts/${acct.data.id}/members`, {
|
||||
firstName: 'Delete',
|
||||
lastName: 'File',
|
||||
})
|
||||
|
||||
const formData = new FormData()
|
||||
formData.append('file', new Blob([TINY_JPEG], { type: 'image/jpeg' }), 'delete-test.jpg')
|
||||
formData.append('entityType', 'member_identifier')
|
||||
formData.append('entityId', member.data.id)
|
||||
formData.append('category', 'front')
|
||||
|
||||
const uploadRes = await fetch(`${t.baseUrl}/v1/files`, {
|
||||
method: 'POST',
|
||||
headers: { Authorization: `Bearer ${t.token}` },
|
||||
body: formData,
|
||||
})
|
||||
const uploaded = await uploadRes.json()
|
||||
|
||||
const res = await t.api.del(`/v1/files/${uploaded.id}`)
|
||||
t.assert.status(res, 200)
|
||||
|
||||
const check = await t.api.get(`/v1/files/${uploaded.id}`)
|
||||
t.assert.status(check, 404)
|
||||
})
|
||||
|
||||
t.test('rejects unsupported file types', { tags: ['validation'] }, async () => {
|
||||
const acct = await t.api.post('/v1/accounts', { name: 'File Reject Test' })
|
||||
|
||||
const formData = new FormData()
|
||||
formData.append('file', new Blob(['not an image'], { type: 'text/plain' }), 'test.txt')
|
||||
formData.append('entityType', 'member_identifier')
|
||||
formData.append('entityId', acct.data.id)
|
||||
formData.append('category', 'front')
|
||||
|
||||
const res = await fetch(`${t.baseUrl}/v1/files`, {
|
||||
method: 'POST',
|
||||
headers: { Authorization: `Bearer ${t.token}` },
|
||||
body: formData,
|
||||
})
|
||||
t.assert.equal(res.status, 400)
|
||||
})
|
||||
|
||||
t.test('returns 404 for missing file', { tags: ['read'] }, async () => {
|
||||
const res = await t.api.get('/v1/files/a0000000-0000-0000-0000-999999999999')
|
||||
t.assert.status(res, 404)
|
||||
})
|
||||
})
|
||||
@@ -17,6 +17,7 @@
|
||||
"dependencies": {
|
||||
"@fastify/cors": "^10",
|
||||
"@fastify/jwt": "^9",
|
||||
"@fastify/multipart": "^9.4.0",
|
||||
"@fastify/rate-limit": "^10.3.0",
|
||||
"@forte/shared": "workspace:*",
|
||||
"bcrypt": "^6",
|
||||
|
||||
25
packages/backend/src/db/migrations/0012_file_storage.sql
Normal file
25
packages/backend/src/db/migrations/0012_file_storage.sql
Normal file
@@ -0,0 +1,25 @@
|
||||
-- File storage table
-- One row per stored blob; the physical bytes live in the storage provider
-- under "path", which is kept unique per company by the index below.
CREATE TABLE IF NOT EXISTS "file" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid(),
"company_id" uuid NOT NULL REFERENCES "company"("id"),
"path" varchar(1000) NOT NULL,
"filename" varchar(255) NOT NULL,
"content_type" varchar(100) NOT NULL,
"size_bytes" integer NOT NULL,
-- Polymorphic owner reference (e.g. 'member_identifier'). No FK is enforced
-- on entity_id, so application code is responsible for referential integrity.
"entity_type" varchar(100) NOT NULL,
"entity_id" uuid NOT NULL,
"category" varchar(100) NOT NULL,
"uploaded_by" uuid,
"created_at" timestamp with time zone NOT NULL DEFAULT now()
);

CREATE UNIQUE INDEX "file_company_path" ON "file" ("company_id", "path");
-- Supports listing files for a given entity scoped to a company
CREATE INDEX "file_entity" ON "file" ("company_id", "entity_type", "entity_id");

-- Update member_identifier: replace base64 columns with file references
-- NOTE(review): DROP COLUMN discards any existing base64 image data
-- irreversibly — if production rows must be preserved, migrate the data into
-- "file" rows before applying this.
ALTER TABLE "member_identifier" DROP COLUMN IF EXISTS "image_front";
ALTER TABLE "member_identifier" DROP COLUMN IF EXISTS "image_back";
ALTER TABLE "member_identifier" DROP COLUMN IF EXISTS "image_front_url";
ALTER TABLE "member_identifier" DROP COLUMN IF EXISTS "image_back_url";
ALTER TABLE "member_identifier" ADD COLUMN "image_front_file_id" uuid REFERENCES "file"("id");
ALTER TABLE "member_identifier" ADD COLUMN "image_back_file_id" uuid REFERENCES "file"("id");
|
||||
@@ -85,6 +85,13 @@
|
||||
"when": 1774710000000,
|
||||
"tag": "0011_member_address",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 12,
|
||||
"version": "7",
|
||||
"when": 1774720000000,
|
||||
"tag": "0012_file_storage",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -82,8 +82,8 @@ export const memberIdentifiers = pgTable('member_identifier', {
|
||||
issuingAuthority: varchar('issuing_authority', { length: 255 }),
|
||||
issuedDate: date('issued_date'),
|
||||
expiresAt: date('expires_at'),
|
||||
imageFront: text('image_front'),
|
||||
imageBack: text('image_back'),
|
||||
imageFrontFileId: uuid('image_front_file_id'),
|
||||
imageBackFileId: uuid('image_back_file_id'),
|
||||
notes: text('notes'),
|
||||
isPrimary: boolean('is_primary').notNull().default(false),
|
||||
createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
|
||||
|
||||
21
packages/backend/src/db/schema/files.ts
Normal file
21
packages/backend/src/db/schema/files.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
import { pgTable, uuid, varchar, integer, timestamp } from 'drizzle-orm/pg-core'
|
||||
import { companies } from './stores.js'
|
||||
|
||||
/**
 * Metadata for a stored blob. The bytes themselves live in the configured
 * StorageProvider under `path`; this table records ownership and lookup keys.
 * Mirrors migration 0012_file_storage.
 */
export const files = pgTable('file', {
  id: uuid('id').primaryKey().defaultRandom(),
  companyId: uuid('company_id')
    .notNull()
    .references(() => companies.id),
  // Provider-relative storage key, e.g. "<companyId>/<entityType>/<entityId>/..."
  path: varchar('path', { length: 1000 }).notNull(),
  // Original filename as supplied by the uploading client
  filename: varchar('filename', { length: 255 }).notNull(),
  contentType: varchar('content_type', { length: 100 }).notNull(),
  sizeBytes: integer('size_bytes').notNull(),
  // Polymorphic owner reference — no DB-level FK; integrity is enforced in code
  entityType: varchar('entity_type', { length: 100 }).notNull(),
  entityId: uuid('entity_id').notNull(),
  // Caller-defined grouping within an entity (e.g. 'front' / 'back')
  category: varchar('category', { length: 100 }).notNull(),
  uploadedBy: uuid('uploaded_by'),
  createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
})

/** Row shape returned by selects on `files`. */
export type FileRecord = typeof files.$inferSelect
/** Insert payload accepted by `files`. */
export type FileRecordInsert = typeof files.$inferInsert
|
||||
@@ -6,12 +6,14 @@ import { corsPlugin } from './plugins/cors.js'
|
||||
import { errorHandlerPlugin } from './plugins/error-handler.js'
|
||||
import { authPlugin } from './plugins/auth.js'
|
||||
import { devAuthPlugin } from './plugins/dev-auth.js'
|
||||
import { storagePlugin } from './plugins/storage.js'
|
||||
import { healthRoutes } from './routes/v1/health.js'
|
||||
import { authRoutes } from './routes/v1/auth.js'
|
||||
import { accountRoutes } from './routes/v1/accounts.js'
|
||||
import { inventoryRoutes } from './routes/v1/inventory.js'
|
||||
import { productRoutes } from './routes/v1/products.js'
|
||||
import { lookupRoutes } from './routes/v1/lookups.js'
|
||||
import { fileRoutes } from './routes/v1/files.js'
|
||||
|
||||
export async function buildApp() {
|
||||
const app = Fastify({
|
||||
@@ -28,6 +30,7 @@ export async function buildApp() {
|
||||
await app.register(databasePlugin)
|
||||
await app.register(redisPlugin)
|
||||
await app.register(rateLimit, { global: false })
|
||||
await app.register(storagePlugin)
|
||||
|
||||
// Auth — JWT in production/test, dev bypass only in development without JWT_SECRET
|
||||
if (process.env.JWT_SECRET) {
|
||||
@@ -46,6 +49,7 @@ export async function buildApp() {
|
||||
await app.register(inventoryRoutes, { prefix: '/v1' })
|
||||
await app.register(productRoutes, { prefix: '/v1' })
|
||||
await app.register(lookupRoutes, { prefix: '/v1' })
|
||||
await app.register(fileRoutes, { prefix: '/v1' })
|
||||
|
||||
return app
|
||||
}
|
||||
|
||||
14
packages/backend/src/plugins/storage.ts
Normal file
14
packages/backend/src/plugins/storage.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
import fp from 'fastify-plugin'
|
||||
import { createStorageProvider, type StorageProvider } from '../storage/index.js'
|
||||
|
||||
declare module 'fastify' {
  interface FastifyInstance {
    // Blob-storage backend selected via STORAGE_PROVIDER (see createStorageProvider)
    storage: StorageProvider
  }
}

/**
 * Builds the configured storage provider once at startup and decorates the
 * Fastify instance with it as `app.storage`. Wrapped in fastify-plugin so the
 * decoration is visible to routes registered in sibling plugins.
 */
export const storagePlugin = fp(async (app) => {
  const storage = createStorageProvider()
  app.decorate('storage', storage)
  app.log.info(`Storage provider: ${process.env.STORAGE_PROVIDER ?? 'local'}`)
})
|
||||
106
packages/backend/src/routes/v1/files.ts
Normal file
106
packages/backend/src/routes/v1/files.ts
Normal file
@@ -0,0 +1,106 @@
|
||||
import type { FastifyPluginAsync } from 'fastify'
|
||||
import multipart from '@fastify/multipart'
|
||||
import { FileService } from '../../services/file.service.js'
|
||||
|
||||
export const fileRoutes: FastifyPluginAsync = async (app) => {
|
||||
await app.register(multipart, {
|
||||
limits: {
|
||||
fileSize: 25 * 1024 * 1024, // 25 MB max
|
||||
files: 1,
|
||||
},
|
||||
})
|
||||
|
||||
// List files for an entity
|
||||
app.get('/files', { preHandler: [app.authenticate] }, async (request, reply) => {
|
||||
const { entityType, entityId } = request.query as { entityType?: string; entityId?: string }
|
||||
if (!entityType || !entityId) {
|
||||
return reply.status(400).send({
|
||||
error: { message: 'entityType and entityId query params required', statusCode: 400 },
|
||||
})
|
||||
}
|
||||
|
||||
const fileRecords = await FileService.listByEntity(app.db, request.companyId, entityType, entityId)
|
||||
const data = await Promise.all(
|
||||
fileRecords.map(async (f) => ({ ...f, url: await app.storage.getUrl(f.path) })),
|
||||
)
|
||||
return reply.send({ data })
|
||||
})
|
||||
|
||||
// Upload a file
|
||||
app.post('/files', { preHandler: [app.authenticate] }, async (request, reply) => {
|
||||
const data = await request.file()
|
||||
if (!data) {
|
||||
return reply.status(400).send({ error: { message: 'No file provided', statusCode: 400 } })
|
||||
}
|
||||
|
||||
const entityType = (data.fields.entityType as { value?: string })?.value
|
||||
const entityId = (data.fields.entityId as { value?: string })?.value
|
||||
const category = (data.fields.category as { value?: string })?.value
|
||||
|
||||
if (!entityType || !entityId || !category) {
|
||||
return reply.status(400).send({
|
||||
error: { message: 'entityType, entityId, and category are required', statusCode: 400 },
|
||||
})
|
||||
}
|
||||
|
||||
const buffer = await data.toBuffer()
|
||||
|
||||
try {
|
||||
const file = await FileService.upload(app.db, app.storage, request.companyId, {
|
||||
data: buffer,
|
||||
filename: data.filename,
|
||||
contentType: data.mimetype,
|
||||
entityType,
|
||||
entityId,
|
||||
category,
|
||||
uploadedBy: request.user.id,
|
||||
})
|
||||
const url = await app.storage.getUrl(file.path)
|
||||
return reply.status(201).send({ ...file, url })
|
||||
} catch (err) {
|
||||
if (err instanceof Error && (err.message.includes('not allowed') || err.message.includes('too large') || err.message.includes('Maximum'))) {
|
||||
return reply.status(400).send({ error: { message: err.message, statusCode: 400 } })
|
||||
}
|
||||
throw err
|
||||
}
|
||||
})
|
||||
|
||||
// Serve file content (for local provider)
|
||||
app.get('/files/serve/*', { preHandler: [app.authenticate] }, async (request, reply) => {
|
||||
const filePath = (request.params as { '*': string })['*']
|
||||
if (!filePath) {
|
||||
return reply.status(400).send({ error: { message: 'Path required', statusCode: 400 } })
|
||||
}
|
||||
|
||||
try {
|
||||
const data = await app.storage.get(filePath)
|
||||
const ext = filePath.split('.').pop()?.toLowerCase()
|
||||
const contentTypeMap: Record<string, string> = {
|
||||
jpg: 'image/jpeg', jpeg: 'image/jpeg', png: 'image/png', webp: 'image/webp', pdf: 'application/pdf',
|
||||
}
|
||||
return reply
|
||||
.header('Content-Type', contentTypeMap[ext ?? ''] ?? 'application/octet-stream')
|
||||
.header('Cache-Control', 'private, max-age=3600')
|
||||
.send(data)
|
||||
} catch {
|
||||
return reply.status(404).send({ error: { message: 'File not found', statusCode: 404 } })
|
||||
}
|
||||
})
|
||||
|
||||
// Get file metadata
|
||||
app.get('/files/:id', { preHandler: [app.authenticate] }, async (request, reply) => {
|
||||
const { id } = request.params as { id: string }
|
||||
const file = await FileService.getById(app.db, request.companyId, id)
|
||||
if (!file) return reply.status(404).send({ error: { message: 'File not found', statusCode: 404 } })
|
||||
const url = await app.storage.getUrl(file.path)
|
||||
return reply.send({ ...file, url })
|
||||
})
|
||||
|
||||
// Delete a file
|
||||
app.delete('/files/:id', { preHandler: [app.authenticate] }, async (request, reply) => {
|
||||
const { id } = request.params as { id: string }
|
||||
const file = await FileService.delete(app.db, app.storage, request.companyId, id)
|
||||
if (!file) return reply.status(404).send({ error: { message: 'File not found', statusCode: 404 } })
|
||||
return reply.send(file)
|
||||
})
|
||||
}
|
||||
@@ -607,8 +607,8 @@ export const MemberIdentifierService = {
|
||||
issuingAuthority: input.issuingAuthority,
|
||||
issuedDate: input.issuedDate,
|
||||
expiresAt: input.expiresAt,
|
||||
imageFrontUrl: input.imageFrontUrl,
|
||||
imageBackUrl: input.imageBackUrl,
|
||||
imageFrontFileId: input.imageFrontFileId,
|
||||
imageBackFileId: input.imageBackFileId,
|
||||
notes: input.notes,
|
||||
isPrimary: input.isPrimary,
|
||||
})
|
||||
|
||||
139
packages/backend/src/services/file.service.ts
Normal file
139
packages/backend/src/services/file.service.ts
Normal file
@@ -0,0 +1,139 @@
|
||||
import { eq, and, count } from 'drizzle-orm'
|
||||
import type { PostgresJsDatabase } from 'drizzle-orm/postgres-js'
|
||||
import { files } from '../db/schema/files.js'
|
||||
import type { StorageProvider } from '../storage/index.js'
|
||||
import { randomUUID } from 'crypto'
|
||||
|
||||
const ALLOWED_IMAGE_TYPES = ['image/jpeg', 'image/png', 'image/webp']
|
||||
const ALLOWED_PDF_TYPES = ['application/pdf']
|
||||
const ALLOWED_TYPES = [...ALLOWED_IMAGE_TYPES, ...ALLOWED_PDF_TYPES]
|
||||
const MAX_IMAGE_SIZE = 10 * 1024 * 1024 // 10 MB
|
||||
const MAX_PDF_SIZE = 25 * 1024 * 1024 // 25 MB
|
||||
const MAX_FILES_PER_ENTITY = 20
|
||||
|
||||
function getExtension(contentType: string): string {
|
||||
const map: Record<string, string> = {
|
||||
'image/jpeg': 'jpg',
|
||||
'image/png': 'png',
|
||||
'image/webp': 'webp',
|
||||
'application/pdf': 'pdf',
|
||||
}
|
||||
return map[contentType] ?? 'bin'
|
||||
}
|
||||
|
||||
export const FileService = {
|
||||
async upload(
|
||||
db: PostgresJsDatabase,
|
||||
storage: StorageProvider,
|
||||
companyId: string,
|
||||
input: {
|
||||
data: Buffer
|
||||
filename: string
|
||||
contentType: string
|
||||
entityType: string
|
||||
entityId: string
|
||||
category: string
|
||||
uploadedBy?: string
|
||||
},
|
||||
) {
|
||||
// Validate content type
|
||||
if (!ALLOWED_TYPES.includes(input.contentType)) {
|
||||
throw new Error(`File type not allowed: ${input.contentType}`)
|
||||
}
|
||||
|
||||
// Validate size
|
||||
const maxSize = ALLOWED_IMAGE_TYPES.includes(input.contentType) ? MAX_IMAGE_SIZE : MAX_PDF_SIZE
|
||||
if (input.data.length > maxSize) {
|
||||
throw new Error(`File too large: ${input.data.length} bytes (max ${maxSize})`)
|
||||
}
|
||||
|
||||
// Check per-entity limit
|
||||
const [existing] = await db
|
||||
.select({ total: count() })
|
||||
.from(files)
|
||||
.where(
|
||||
and(
|
||||
eq(files.companyId, companyId),
|
||||
eq(files.entityType, input.entityType),
|
||||
eq(files.entityId, input.entityId),
|
||||
),
|
||||
)
|
||||
if (existing.total >= MAX_FILES_PER_ENTITY) {
|
||||
throw new Error(`Maximum ${MAX_FILES_PER_ENTITY} files per entity`)
|
||||
}
|
||||
|
||||
// Generate path
|
||||
const fileId = randomUUID()
|
||||
const ext = getExtension(input.contentType)
|
||||
const path = `${companyId}/${input.entityType}/${input.entityId}/${input.category}-${fileId}.${ext}`
|
||||
|
||||
// Write to storage
|
||||
await storage.put(path, input.data, input.contentType)
|
||||
|
||||
// Insert record
|
||||
const [file] = await db
|
||||
.insert(files)
|
||||
.values({
|
||||
id: fileId,
|
||||
companyId,
|
||||
path,
|
||||
filename: input.filename,
|
||||
contentType: input.contentType,
|
||||
sizeBytes: input.data.length,
|
||||
entityType: input.entityType,
|
||||
entityId: input.entityId,
|
||||
category: input.category,
|
||||
uploadedBy: input.uploadedBy,
|
||||
})
|
||||
.returning()
|
||||
|
||||
return file
|
||||
},
|
||||
|
||||
async getById(db: PostgresJsDatabase, companyId: string, id: string) {
|
||||
const [file] = await db
|
||||
.select()
|
||||
.from(files)
|
||||
.where(and(eq(files.id, id), eq(files.companyId, companyId)))
|
||||
.limit(1)
|
||||
return file ?? null
|
||||
},
|
||||
|
||||
async listByEntity(
|
||||
db: PostgresJsDatabase,
|
||||
companyId: string,
|
||||
entityType: string,
|
||||
entityId: string,
|
||||
) {
|
||||
return db
|
||||
.select()
|
||||
.from(files)
|
||||
.where(
|
||||
and(
|
||||
eq(files.companyId, companyId),
|
||||
eq(files.entityType, entityType),
|
||||
eq(files.entityId, entityId),
|
||||
),
|
||||
)
|
||||
.orderBy(files.createdAt)
|
||||
},
|
||||
|
||||
async delete(
|
||||
db: PostgresJsDatabase,
|
||||
storage: StorageProvider,
|
||||
companyId: string,
|
||||
id: string,
|
||||
) {
|
||||
const file = await this.getById(db, companyId, id)
|
||||
if (!file) return null
|
||||
|
||||
await storage.delete(file.path)
|
||||
|
||||
const [deleted] = await db
|
||||
.delete(files)
|
||||
.where(and(eq(files.id, id), eq(files.companyId, companyId)))
|
||||
.returning()
|
||||
|
||||
return deleted ?? null
|
||||
},
|
||||
}
|
||||
23
packages/backend/src/storage/index.ts
Normal file
23
packages/backend/src/storage/index.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import { LocalStorageProvider } from './local.js'
|
||||
import type { StorageProvider } from './provider.js'
|
||||
|
||||
export type { StorageProvider }
|
||||
|
||||
export function createStorageProvider(): StorageProvider {
|
||||
const provider = process.env.STORAGE_PROVIDER ?? 'local'
|
||||
|
||||
if (provider === 'local') {
|
||||
const root = process.env.STORAGE_LOCAL_PATH ?? './data/files'
|
||||
const baseUrl = `http://localhost:${process.env.PORT ?? '8000'}`
|
||||
return new LocalStorageProvider(root, baseUrl)
|
||||
}
|
||||
|
||||
if (provider === 's3') {
|
||||
// Lazy import to avoid requiring @aws-sdk when using local
|
||||
throw new Error(
|
||||
'S3 provider requires @aws-sdk/client-s3. Install it and update this factory.',
|
||||
)
|
||||
}
|
||||
|
||||
throw new Error(`Unknown storage provider: ${provider}`)
|
||||
}
|
||||
48
packages/backend/src/storage/local.ts
Normal file
48
packages/backend/src/storage/local.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import { mkdir, readFile, writeFile, unlink, access } from 'fs/promises'
|
||||
import { dirname, join } from 'path'
|
||||
import type { StorageProvider } from './provider.js'
|
||||
|
||||
export class LocalStorageProvider implements StorageProvider {
|
||||
private root: string
|
||||
private baseUrl: string
|
||||
|
||||
constructor(root: string, baseUrl: string) {
|
||||
this.root = root
|
||||
this.baseUrl = baseUrl
|
||||
}
|
||||
|
||||
private fullPath(path: string): string {
|
||||
return join(this.root, path)
|
||||
}
|
||||
|
||||
async put(path: string, data: Buffer, _contentType: string): Promise<void> {
|
||||
const fullPath = this.fullPath(path)
|
||||
await mkdir(dirname(fullPath), { recursive: true })
|
||||
await writeFile(fullPath, data)
|
||||
}
|
||||
|
||||
async get(path: string): Promise<Buffer> {
|
||||
return readFile(this.fullPath(path))
|
||||
}
|
||||
|
||||
async delete(path: string): Promise<void> {
|
||||
try {
|
||||
await unlink(this.fullPath(path))
|
||||
} catch (err: unknown) {
|
||||
if ((err as NodeJS.ErrnoException).code !== 'ENOENT') throw err
|
||||
}
|
||||
}
|
||||
|
||||
async exists(path: string): Promise<boolean> {
|
||||
try {
|
||||
await access(this.fullPath(path))
|
||||
return true
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
async getUrl(path: string, _expiresIn?: number): Promise<string> {
|
||||
return `${this.baseUrl}/v1/files/serve/${encodeURIComponent(path)}`
|
||||
}
|
||||
}
|
||||
7
packages/backend/src/storage/provider.ts
Normal file
7
packages/backend/src/storage/provider.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
/**
 * Abstraction over blob-storage backends (local filesystem today, S3 later).
 * Paths are provider-relative keys, e.g. "<companyId>/<entityType>/<id>/...".
 */
export interface StorageProvider {
  /** Store `data` at `path`; parent directories/prefixes are created as needed. */
  put(path: string, data: Buffer, contentType: string): Promise<void>
  /** Read the blob at `path`; rejects when it does not exist. */
  get(path: string): Promise<Buffer>
  /** Remove the blob at `path`. */
  delete(path: string): Promise<void>
  /** Whether a blob currently exists at `path`. */
  exists(path: string): Promise<boolean>
  /**
   * URL a client can fetch the blob from. `expiresIn` is intended for
   * providers that sign URLs; the local provider ignores it.
   */
  getUrl(path: string, expiresIn?: number): Promise<string>
}
|
||||
47
packages/backend/src/storage/s3.ts
Normal file
47
packages/backend/src/storage/s3.ts
Normal file
@@ -0,0 +1,47 @@
|
||||
import type { StorageProvider } from './provider.js'
|
||||
|
||||
// S3 provider — requires @aws-sdk/client-s3 (install when needed)
|
||||
// This is a placeholder that documents the interface. Install the SDK
|
||||
// and uncomment when deploying with S3.
|
||||
|
||||
export class S3StorageProvider implements StorageProvider {
|
||||
private bucket: string
|
||||
private region: string
|
||||
private endpoint?: string
|
||||
|
||||
constructor(config: {
|
||||
bucket: string
|
||||
region: string
|
||||
endpoint?: string
|
||||
accessKey: string
|
||||
secretKey: string
|
||||
}) {
|
||||
this.bucket = config.bucket
|
||||
this.region = config.region
|
||||
this.endpoint = config.endpoint
|
||||
// TODO: initialize S3Client from @aws-sdk/client-s3
|
||||
throw new Error(
|
||||
'S3 provider not yet implemented. Install @aws-sdk/client-s3 and implement.',
|
||||
)
|
||||
}
|
||||
|
||||
async put(_path: string, _data: Buffer, _contentType: string): Promise<void> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async get(_path: string): Promise<Buffer> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async delete(_path: string): Promise<void> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async exists(_path: string): Promise<boolean> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
|
||||
async getUrl(_path: string, _expiresIn?: number): Promise<string> {
|
||||
throw new Error('Not implemented')
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user