All checks were successful
Deploy to Production / deploy (push) Successful in 2m2s
- Added public bucket integration for serving media files directly instead of through API proxy
- Updated image picker to use new MediaLibrary component with improved UI/UX
- Removed selectedImageKeys prop dependency from EditorShell and StepEdit components
- Modified image upload endpoints to automatically copy files to public bucket when configured
- Added fallback to API proxy URLs when public bucket copy fails
- Improved image picker dialog
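As a sketch of the new flow from the client side, the snippet below uploads a file through the image endpoint in this router. The 'image' field name and the returned url field come from the route itself; the /api/media prefix and the error handling are assumptions.

async function uploadImage(file: File): Promise<string> {
  const form = new FormData();
  form.append('image', file); // field name must match upload.single('image') in the router

  const res = await fetch('/api/media/image', { method: 'POST', body: form });
  if (!res.ok) throw new Error(`Upload failed: ${res.status}`);

  // `url` is a direct public-bucket URL when the copy succeeded,
  // otherwise the /api/media/obj proxy fallback.
  const data = (await res.json()) as { success: boolean; bucket: string; key: string; url: string };
  return data.url;
}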
243 lines
8.3 KiB
TypeScript
import express from 'express';
import multer from 'multer';
import crypto from 'crypto';
import { uploadBuffer, downloadObject, listObjects, deleteObject as s3DeleteObject, copyObject, getPublicUrlForKey } from './storage/s3';
import { db } from './db';
import { audioClips, posts } from './db/schema';
import { eq } from 'drizzle-orm';

const router = express.Router();
const upload = multer({ storage: multer.memoryStorage() });

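// GET /list — list bucket objects under an optional prefix, paginated via page/pageSize query params.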
router.get('/list', async (
  req: express.Request,
  res: express.Response
) => {
  try {
    const bucket = (req.query.bucket as string) || process.env.S3_BUCKET || '';
    const prefix = (req.query.prefix as string) || '';
    const page = Math.max(parseInt((req.query.page as string) || '1', 10), 1);
    const pageSize = Math.min(Math.max(parseInt((req.query.pageSize as string) || '50', 10), 1), 200);

    if (!bucket) return res.status(400).json({ error: 'bucket is required' });

    // For pagination, we need to fetch more items and slice
    // S3 doesn't support offset-based pagination natively, so we fetch a larger set
    const maxKeys = page * pageSize;
    const out = await listObjects({ bucket, prefix, maxKeys });

    // Calculate pagination
    const startIndex = (page - 1) * pageSize;
    const endIndex = startIndex + pageSize;
    const paginatedItems = out.items.slice(startIndex, endIndex);

    return res.json({
      items: paginatedItems,
      total: out.items.length,
      page,
      pageSize,
    });
  } catch (err) {
    console.error('List objects failed:', err);
    return res.status(500).json({ error: 'List failed' });
  }
});

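// DELETE /obj — remove a single object from S3; expects { bucket?, key } in the JSON body.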
router.delete('/obj', async (
  req: express.Request,
  res: express.Response
) => {
  try {
    const { bucket: bodyBucket, key } = req.body as { bucket?: string; key?: string };
    const bucket = bodyBucket || process.env.S3_BUCKET || '';
    if (!bucket || !key) return res.status(400).json({ error: 'bucket and key are required' });
    await s3DeleteObject({ bucket, key });
    return res.json({ success: true });
  } catch (err) {
    console.error('Delete object failed:', err);
    return res.status(500).json({ error: 'Delete failed' });
  }
});

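// POST /image — upload an image to the primary bucket; when PUBLIC_MEDIA_BUCKET and PUBLIC_MEDIA_BASE_URL
// are set, mirror it to the public bucket and return a direct URL, falling back to the /api/media/obj proxy URL.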
router.post('/image', upload.single('image'), async (
  req: express.Request,
  res: express.Response
) => {
  try {
    console.log('[API] POST /api/media/image');
    const { S3_ENDPOINT, S3_ACCESS_KEY, S3_SECRET_KEY } = process.env;
    if (!S3_ENDPOINT || !S3_ACCESS_KEY || !S3_SECRET_KEY) {
      console.error('Image upload failed: missing S3 config');
      return res.status(500).json({ error: 'Object storage not configured' });
    }

    if (!req.file) return res.status(400).json({ error: 'No image file' });

    const bucket = process.env.S3_BUCKET || 'voxblog';
    const mime = req.file.mimetype || 'application/octet-stream';
    const ext = mime.split('/')[1] || 'bin';
    const key = `images/${new Date().toISOString().slice(0,10)}/${crypto.randomUUID()}.${ext}`;
    console.log('[API] Uploading image', { mime, size: req.file.size, bucket, key });

    const out = await uploadBuffer({
      bucket,
      key,
      body: req.file.buffer,
      contentType: mime,
    });

    // Copy to public bucket if configured
    const publicBucket = process.env.PUBLIC_MEDIA_BUCKET;
    const publicBaseUrl = process.env.PUBLIC_MEDIA_BASE_URL;
    let finalUrl = `/api/media/obj?bucket=${encodeURIComponent(out.bucket)}&key=${encodeURIComponent(out.key)}`;

    if (publicBucket && publicBaseUrl) {
      try {
        await copyObject({
          srcBucket: bucket,
          srcKey: key,
          destBucket: publicBucket,
          destKey: key,
        });
        const publicUrl = getPublicUrlForKey(key);
        if (publicUrl) {
          finalUrl = publicUrl;
          console.log('[API] Image copied to public bucket, using public URL:', publicUrl);
        }
      } catch (err) {
        console.warn('[API] Failed to copy image to public bucket, using API proxy URL:', err);
      }
    }

    console.log('[API] Image upload success', out);
    return res.status(200).json({ success: true, ...out, url: finalUrl });
  } catch (err) {
    console.error('Image upload failed:', err);
    return res.status(500).json({ error: 'Image upload failed' });
  }
});

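// GET /obj — stream an object from S3 through the API with long-lived, immutable cache headers.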
router.get('/obj', async (
  req: express.Request,
  res: express.Response
) => {
  try {
    const bucket = (req.query.bucket as string) || process.env.S3_BUCKET || '';
    const key = req.query.key as string;
    if (!bucket || !key) return res.status(400).json({ error: 'bucket and key are required' });
    const { buffer, contentType } = await downloadObject({ bucket, key });
    res.setHeader('Content-Type', contentType || 'application/octet-stream');
    // Basic cache headers for media
    res.setHeader('Cache-Control', 'public, max-age=31536000, immutable');
    return res.send(buffer);
  } catch (err) {
    console.error('Object fetch failed:', err);
    return res.status(404).json({ error: 'Object not found' });
  }
});

// Delete an audio clip: remove the object from S3 (best-effort), delete its
// audio_clips row, and touch the parent post's updated_at.
router.delete('/audio/:clipId', async (
  req: express.Request,
  res: express.Response
) => {
  try {
    const clipId = req.params.clipId;
    if (!clipId) return res.status(400).json({ error: 'clipId is required' });

    // Get clip info from database
    const rows = await db
      .select()
      .from(audioClips)
      .where(eq(audioClips.id, clipId))
      .limit(1);

    if (rows.length === 0) {
      return res.status(404).json({ error: 'Audio clip not found' });
    }

    const clip = rows[0];

    // Delete from S3
    try {
      await s3DeleteObject({ bucket: clip.bucket, key: clip.key });
    } catch (err) {
      console.warn('[API] Failed to delete from S3:', err);
      // Continue anyway to delete from DB
    }

    // Delete from database
    await db.delete(audioClips).where(eq(audioClips.id, clipId));

    // Touch post updated_at
    if (clip.postId) {
      await db.update(posts).set({ updatedAt: new Date() }).where(eq(posts.id, clip.postId));
    }

    return res.json({ success: true });
  } catch (err) {
    console.error('[API] Delete audio clip failed:', err);
    return res.status(500).json({ error: 'Failed to delete audio clip' });
  }
});

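// POST /audio — upload a recorded audio clip; when a postId query param is present, the clip is
// also recorded in audio_clips and the parent post's updated_at is touched.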
router.post('/audio', upload.single('audio'), async (
  req: express.Request,
  res: express.Response
) => {
  try {
    console.log('[API] POST /api/media/audio');
    const { S3_ENDPOINT, S3_ACCESS_KEY, S3_SECRET_KEY } = process.env;
    if (!S3_ENDPOINT || !S3_ACCESS_KEY || !S3_SECRET_KEY) {
      console.error('Upload failed: missing S3 config (S3_ENDPOINT/S3_ACCESS_KEY/S3_SECRET_KEY)');
      return res.status(500).json({ error: 'Object storage not configured' });
    }

    if (!req.file) return res.status(400).json({ error: 'No audio file' });

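    // Map the recorder mime type to a file extension and build the object key;
    // clips tied to a post are grouped under audio/posts/<postId>/.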
    const bucket = process.env.S3_BUCKET || 'voxblog';
    const mime = req.file.mimetype || 'application/octet-stream';
    const ext = mime === 'audio/webm' ? 'webm' : (mime === 'audio/mp4' ? 'm4a' : (mime.split('/')[1] || 'bin'));
    const postId = (req.query.postId as string) || '';
    const clipId = crypto.randomUUID();
    const key = postId
      ? `audio/posts/${encodeURIComponent(postId)}/${clipId}.${ext}`
      : `audio/${new Date().toISOString().slice(0,10)}/${crypto.randomUUID()}.${ext}`;
    console.log('[API] Uploading file', { mime, size: req.file.size, bucket, key, postId: postId || '(none)', clipId });

    const out = await uploadBuffer({
      bucket,
      key,
      body: req.file.buffer,
      contentType: mime,
    });

    // If postId provided, insert into audio_clips table (Drizzle) and touch post.updated_at
    if (postId) {
      try {
        const now = new Date();
        await db.insert(audioClips).values({
          id: clipId,
          postId,
          bucket: out.bucket,
          key: out.key,
          mime,
          createdAt: now,
        });
        await db.update(posts).set({ updatedAt: now }).where(eq(posts.id, postId));
      } catch (e) {
        console.error('[API] DB insert audio_clip failed:', e);
        // continue anyway, response still returns S3 info
      }
    }

    console.log('[API] Upload success', out);
    return res.status(200).json({ success: true, ...out, clipId: postId ? clipId : undefined, postId: postId || undefined });
  } catch (err) {
    console.error('Upload failed:', err);
    return res.status(500).json({ error: 'Upload failed' });
  }
});

export default router;
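For completeness, a minimal sketch of how this router could be mounted in the Express app. The /api/media prefix is inferred from the log messages and the proxy URL built above; the app file itself is not part of this change, and the import path is hypothetical.

import express from 'express';
import mediaRouter from './media'; // hypothetical path to the router above

const app = express();
app.use(express.json()); // DELETE /obj reads bucket/key from a JSON body
app.use('/api/media', mediaRouter); // assumed mount point, matching the '/api/media/...' URLs in the router

app.listen(3000, () => console.log('API listening on :3000'));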