Compare commits

25 Commits

Author SHA1 Message Date
b905c37c8a refactor: Simplify message deletion logic and improve response handling in DiscordRateLimiter
All checks were successful
Gitea Actions Demo / Explore-Gitea-Actions (push) Successful in 7s
Vercel Production Deployment / Deploy-Production (push) Successful in 1m31s
2025-10-16 13:48:57 +02:00
ca4a696093 refactor: Ensure proper handling of empty responses in Discord API request 2025-10-16 13:46:58 +02:00
df677a345a refactor: Correctly iterate over file parts array in delete operation 2025-10-16 13:44:57 +02:00
56726adaad refactor: Correctly assign file parts array in response object 2025-10-16 13:40:41 +02:00
841923c7ff refactor: Enhance error handling for file metadata fetching and parsing 2025-10-16 13:38:20 +02:00
bc67634411 refactor: Simplify Discord API response handling and define response interface 2025-10-16 13:37:39 +02:00
49a6131b23 refactor: Improve error handling for Discord API response parsing 2025-10-16 13:33:35 +02:00
968a843120 refactor: Add type annotations for response objects in API request handling
All checks were successful
Gitea Actions Demo / Explore-Gitea-Actions (push) Successful in 7s
Vercel Production Deployment / Deploy-Production (push) Successful in 1m32s
2025-10-16 13:21:01 +02:00
bbf3725acf refactor: Implement DiscordRateLimiter for API requests to manage rate limits 2025-10-16 13:18:25 +02:00
22ed99da86 refactor: Adjust chunk size to 4 MB and increase parallel uploads to 5
All checks were successful
Gitea Actions Demo / Explore-Gitea-Actions (push) Successful in 7s
Vercel Production Deployment / Deploy-Production (push) Successful in 1m33s
2025-10-16 13:12:05 +02:00
0dfdbc1a7e refactor: Change Vercel CLI installation method from pnpm to npm for consistency
All checks were successful
Vercel Production Deployment / Deploy-Production (push) Successful in 1m35s
Gitea Actions Demo / Explore-Gitea-Actions (push) Successful in 7s
Vercel Preview Deployment / Deploy-Preview (push) Successful in 1m34s
2025-10-16 13:05:48 +02:00
a584cd0c66 refactor: Update Vercel CLI installation to use pnpm for consistency across workflows
Some checks failed
Gitea Actions Demo / Explore-Gitea-Actions (push) Successful in 7s
Vercel Production Deployment / Deploy-Production (push) Failing after 9s
2025-10-16 13:05:06 +02:00
3122f04fa4 refactor: Add pnpm installation step to Vercel deployment workflows
Some checks failed
Gitea Actions Demo / Explore-Gitea-Actions (push) Successful in 7s
Vercel Production Deployment / Deploy-Production (push) Has been cancelled
2025-10-16 13:00:26 +02:00
872d590ff0 refactor: Simplify body tag formatting in RootLayout component 2025-10-16 12:59:02 +02:00
ac52eaf984 refactor: Add @vercel/analytics dependency and integrate Analytics component in layout 2025-10-16 12:57:54 +02:00
a310fe9564 refactor: Update type annotations and error handling for improved type safety in route and download components
Some checks failed
Gitea Actions Demo / Explore-Gitea-Actions (push) Successful in 7s
Vercel Production Deployment / Deploy-Production (push) Failing after 24s
2025-10-16 11:28:11 +02:00
fde0dd7216 refactor: Improve type safety in download and file routes by updating type annotations 2025-10-16 11:25:08 +02:00
cf72ed90e7 refactor: Enhance type safety for database queries in delete and download routes 2025-10-16 11:20:08 +02:00
966450a2de refactor: Specify type for active array in upload process 2025-10-16 11:18:05 +02:00
f533d86896 refactor: Update type annotations for database query results in cleanup route 2025-10-16 11:17:30 +02:00
97b10716b3 refactor: Update type annotation for query result in complete-upload route 2025-10-16 11:15:51 +02:00
ae452de3d0 refactor: Update type annotations for database queries and improve button state handling in download page 2025-10-16 11:15:14 +02:00
bc723445fa feat: Add Gitea workflows for demo, Vercel preview, and production deployments 2025-10-16 11:13:22 +02:00
d2d50ae860 feat: Add Dockerfile for building and running LockLoad Next.js application 2025-10-16 10:53:29 +02:00
3a9b29c158 refactor: Update application title and description to reflect new branding as LockLoad 2025-10-16 10:35:15 +02:00
21 changed files with 338 additions and 110 deletions

View File

@@ -0,0 +1,19 @@
name: Gitea Actions Demo
run-name: ${{ gitea.actor }} is testing out Gitea Actions
on: [push]
jobs:
  Explore-Gitea-Actions:
    runs-on: ubuntu-latest
    steps:
      - run: echo "The job was automatically triggered by a ${{ gitea.event_name }} event."
      - run: echo "This job is now running on a ${{ runner.os }} server hosted by Gitea!"
      - run: echo "The name of your branch is ${{ gitea.ref }} and your repository is ${{ gitea.repository }}."
      - name: Check out repository code
        uses: actions/checkout@v3
      - run: echo "The ${{ gitea.repository }} repository has been cloned to the runner."
      - run: echo "The workflow is now ready to test your code on the runner."
      - name: List files in the repository
        run: |
          ls ${{ gitea.workspace }}
      - run: echo "This job's status is ${{ job.status }}."

View File

@@ -0,0 +1,23 @@
name: Vercel Preview Deployment
env:
  VERCEL_ORG_ID: ${{ secrets.VERCEL_ORG_ID }}
  VERCEL_PROJECT_ID: ${{ secrets.VERCEL_PROJECT_ID }}
on:
  push:
    branches-ignore:
      - main
jobs:
  Deploy-Preview:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Install pnpm
        run: npm install -g pnpm
      - name: Install Vercel CLI
        run: npm install --global vercel@latest
      - name: Pull Vercel Environment Information
        run: vercel pull --yes --environment=preview --token=${{ secrets.VERCEL_TOKEN }}
      - name: Build Project Artifacts
        run: vercel build --token=${{ secrets.VERCEL_TOKEN }}
      - name: Deploy Project Artifacts to Vercel
        run: vercel deploy --prebuilt --token=${{ secrets.VERCEL_TOKEN }}

View File

@@ -0,0 +1,23 @@
name: Vercel Production Deployment
env:
  VERCEL_ORG_ID: ${{ secrets.VERCEL_ORG_ID }}
  VERCEL_PROJECT_ID: ${{ secrets.VERCEL_PROJECT_ID }}
on:
  push:
    branches:
      - main
jobs:
  Deploy-Production:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Install pnpm
        run: npm install -g pnpm
      - name: Install Vercel CLI
        run: npm install --global vercel@latest
      - name: Pull Vercel Environment Information
        run: vercel pull --yes --environment=production --token=${{ secrets.VERCEL_TOKEN }}
      - name: Build Project Artifacts
        run: vercel build --prod --token=${{ secrets.VERCEL_TOKEN }}
      - name: Deploy Project Artifacts to Vercel
        run: vercel deploy --prebuilt --prod --token=${{ secrets.VERCEL_TOKEN }}

View File

@@ -9,6 +9,7 @@
     "lint": "eslint"
   },
   "dependencies": {
+    "@vercel/analytics": "^1.5.0",
     "bcrypt": "^6.0.0",
     "mysql2": "^3.15.2",
     "next": "15.5.5",

pnpm-lock.yaml generated (34 changed lines)
View File

@@ -8,6 +8,9 @@ importers:
   .:
     dependencies:
+      '@vercel/analytics':
+        specifier: ^1.5.0
+        version: 1.5.0(next@15.5.5(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react@19.1.0)
       bcrypt:
         specifier: ^6.0.0
         version: 6.0.0
@@ -623,6 +626,32 @@ packages:
     cpu: [x64]
     os: [win32]
 
+  '@vercel/analytics@1.5.0':
+    resolution: {integrity: sha512-MYsBzfPki4gthY5HnYN7jgInhAZ7Ac1cYDoRWFomwGHWEX7odTEzbtg9kf/QSo7XEsEAqlQugA6gJ2WS2DEa3g==}
+    peerDependencies:
+      '@remix-run/react': ^2
+      '@sveltejs/kit': ^1 || ^2
+      next: '>= 13'
+      react: ^18 || ^19 || ^19.0.0-rc
+      svelte: '>= 4'
+      vue: ^3
+      vue-router: ^4
+    peerDependenciesMeta:
+      '@remix-run/react':
+        optional: true
+      '@sveltejs/kit':
+        optional: true
+      next:
+        optional: true
+      react:
+        optional: true
+      svelte:
+        optional: true
+      vue:
+        optional: true
+      vue-router:
+        optional: true
+
   acorn-jsx@5.3.2:
     resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==}
     peerDependencies:
@@ -2360,6 +2389,11 @@ snapshots:
   '@unrs/resolver-binding-win32-x64-msvc@1.11.1':
     optional: true
 
+  '@vercel/analytics@1.5.0(next@15.5.5(react-dom@19.1.0(react@19.1.0))(react@19.1.0))(react@19.1.0)':
+    optionalDependencies:
+      next: 15.5.5(react-dom@19.1.0(react@19.1.0))(react@19.1.0)
+      react: 19.1.0
+
   acorn-jsx@5.3.2(acorn@8.15.0):
     dependencies:
       acorn: 8.15.0

View File

@@ -1 +0,0 @@
<svg fill="none" viewBox="0 0 16 16" xmlns="http://www.w3.org/2000/svg"><path d="M14.5 13.5V5.41a1 1 0 0 0-.3-.7L9.8.29A1 1 0 0 0 9.08 0H1.5v13.5A2.5 2.5 0 0 0 4 16h8a2.5 2.5 0 0 0 2.5-2.5m-1.5 0v-7H8v-5H3v12a1 1 0 0 0 1 1h8a1 1 0 0 0 1-1M9.5 5V2.12L12.38 5zM5.13 5h-.62v1.25h2.12V5zm-.62 3h7.12v1.25H4.5zm.62 3h-.62v1.25h7.12V11z" clip-rule="evenodd" fill="#666" fill-rule="evenodd"/></svg>

Before: 391 B

View File

@@ -1 +0,0 @@
<svg fill="none" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16"><g clip-path="url(#a)"><path fill-rule="evenodd" clip-rule="evenodd" d="M10.27 14.1a6.5 6.5 0 0 0 3.67-3.45q-1.24.21-2.7.34-.31 1.83-.97 3.1M8 16A8 8 0 1 0 8 0a8 8 0 0 0 0 16m.48-1.52a7 7 0 0 1-.96 0H7.5a4 4 0 0 1-.84-1.32q-.38-.89-.63-2.08a40 40 0 0 0 3.92 0q-.25 1.2-.63 2.08a4 4 0 0 1-.84 1.31zm2.94-4.76q1.66-.15 2.95-.43a7 7 0 0 0 0-2.58q-1.3-.27-2.95-.43a18 18 0 0 1 0 3.44m-1.27-3.54a17 17 0 0 1 0 3.64 39 39 0 0 1-4.3 0 17 17 0 0 1 0-3.64 39 39 0 0 1 4.3 0m1.1-1.17q1.45.13 2.69.34a6.5 6.5 0 0 0-3.67-3.44q.65 1.26.98 3.1M8.48 1.5l.01.02q.41.37.84 1.31.38.89.63 2.08a40 40 0 0 0-3.92 0q.25-1.2.63-2.08a4 4 0 0 1 .85-1.32 7 7 0 0 1 .96 0m-2.75.4a6.5 6.5 0 0 0-3.67 3.44 29 29 0 0 1 2.7-.34q.31-1.83.97-3.1M4.58 6.28q-1.66.16-2.95.43a7 7 0 0 0 0 2.58q1.3.27 2.95.43a18 18 0 0 1 0-3.44m.17 4.71q-1.45-.12-2.69-.34a6.5 6.5 0 0 0 3.67 3.44q-.65-1.27-.98-3.1" fill="#666"/></g><defs><clipPath id="a"><path fill="#fff" d="M0 0h16v16H0z"/></clipPath></defs></svg>

Before: 1.0 KiB

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 394 80"><path fill="#000" d="M262 0h68.5v12.7h-27.2v66.6h-13.6V12.7H262V0ZM149 0v12.7H94v20.4h44.3v12.6H94v21h55v12.6H80.5V0h68.7zm34.3 0h-17.8l63.8 79.4h17.9l-32-39.7 32-39.6h-17.9l-23 28.6-23-28.6zm18.3 56.7-9-11-27.1 33.7h17.8l18.3-22.7z"/><path fill="#000" d="M81 79.3 17 0H0v79.3h13.6V17l50.2 62.3H81Zm252.6-.4c-1 0-1.8-.4-2.5-1s-1.1-1.6-1.1-2.6.3-1.8 1-2.5 1.6-1 2.6-1 1.8.3 2.5 1a3.4 3.4 0 0 1 .6 4.3 3.7 3.7 0 0 1-3 1.8zm23.2-33.5h6v23.3c0 2.1-.4 4-1.3 5.5a9.1 9.1 0 0 1-3.8 3.5c-1.6.8-3.5 1.3-5.7 1.3-2 0-3.7-.4-5.3-1s-2.8-1.8-3.7-3.2c-.9-1.3-1.4-3-1.4-5h6c.1.8.3 1.6.7 2.2s1 1.2 1.6 1.5c.7.4 1.5.5 2.4.5 1 0 1.8-.2 2.4-.6a4 4 0 0 0 1.6-1.8c.3-.8.5-1.8.5-3V45.5zm30.9 9.1a4.4 4.4 0 0 0-2-3.3 7.5 7.5 0 0 0-4.3-1.1c-1.3 0-2.4.2-3.3.5-.9.4-1.6 1-2 1.6a3.5 3.5 0 0 0-.3 4c.3.5.7.9 1.3 1.2l1.8 1 2 .5 3.2.8c1.3.3 2.5.7 3.7 1.2a13 13 0 0 1 3.2 1.8 8.1 8.1 0 0 1 3 6.5c0 2-.5 3.7-1.5 5.1a10 10 0 0 1-4.4 3.5c-1.8.8-4.1 1.2-6.8 1.2-2.6 0-4.9-.4-6.8-1.2-2-.8-3.4-2-4.5-3.5a10 10 0 0 1-1.7-5.6h6a5 5 0 0 0 3.5 4.6c1 .4 2.2.6 3.4.6 1.3 0 2.5-.2 3.5-.6 1-.4 1.8-1 2.4-1.7a4 4 0 0 0 .8-2.4c0-.9-.2-1.6-.7-2.2a11 11 0 0 0-2.1-1.4l-3.2-1-3.8-1c-2.8-.7-5-1.7-6.6-3.2a7.2 7.2 0 0 1-2.4-5.7 8 8 0 0 1 1.7-5 10 10 0 0 1 4.3-3.5c2-.8 4-1.2 6.4-1.2 2.3 0 4.4.4 6.2 1.2 1.8.8 3.2 2 4.3 3.4 1 1.4 1.5 3 1.5 5h-5.8z"/></svg>

Before: 1.3 KiB

View File

@@ -1 +0,0 @@
<svg fill="none" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 1155 1000"><path d="m577.3 0 577.4 1000H0z" fill="#fff"/></svg>

Before: 128 B

View File

@@ -1 +0,0 @@
<svg fill="none" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16"><path fill-rule="evenodd" clip-rule="evenodd" d="M1.5 2.5h13v10a1 1 0 0 1-1 1h-11a1 1 0 0 1-1-1zM0 1h16v11.5a2.5 2.5 0 0 1-2.5 2.5h-11A2.5 2.5 0 0 1 0 12.5zm3.75 4.5a.75.75 0 1 0 0-1.5.75.75 0 0 0 0 1.5M7 4.75a.75.75 0 1 1-1.5 0 .75.75 0 0 1 1.5 0m1.75.75a.75.75 0 1 0 0-1.5.75.75 0 0 0 0 1.5" fill="#666"/></svg>

Before: 385 B

View File

@@ -2,6 +2,7 @@ import { NextResponse } from 'next/server';
 import pool from '@/lib/db';
 import bcrypt from 'bcrypt';
 import crypto from 'crypto';
+import { ResultSetHeader } from 'mysql2/promise';
 
 const SALT_ROUNDS = 10;
@@ -28,7 +29,7 @@ export async function POST(request: Request) {
   const connection = await pool.getConnection();
   try {
-    const [result]: any = await connection.query(
+    const [result]: [ResultSetHeader, unknown] = await connection.query(
       'UPDATE files SET token_hash = ?, expires_at = ? WHERE id = ?',
       [tokenHash, expiresAt, file_id]
     );

View File

@@ -1,10 +1,24 @@
 import { NextResponse } from 'next/server';
 import pool from '@/lib/db';
+import { RowDataPacket } from 'mysql2/promise';
+import { DiscordRateLimiter } from '@/lib/discordRateLimiter';
+
+const DISCORD_API_BASE_URL = 'https://discord.com/api/v10'; // Or the appropriate version
+
+interface FileId {
+  id: string;
+}
+
+interface FilePart {
+  discord_message_id: string | null;
+}
 
 if (!process.env.DISCORD_BOT_TOKEN || !process.env.DISCORD_CHANNEL_ID || !process.env.CRON_SECRET) {
   throw new Error('Discord or Cron secret environment variables are not configured');
 }
 
+const rateLimiter = new DiscordRateLimiter(DISCORD_API_BASE_URL, process.env.DISCORD_BOT_TOKEN);
+
 export async function POST(request: Request) {
   try {
     const authHeader = request.headers.get('authorization');
@@ -21,33 +35,33 @@ export async function POST(request: Request) {
       const staleTime = new Date();
       staleTime.setHours(staleTime.getHours() - 24); // Older than 24 hours
-      const [incompleteFiles]: any[] = await connection.query(
+      const [incompleteFiles]: [RowDataPacket[], unknown] = await connection.query(
         'SELECT id FROM files WHERE token_hash IS NULL AND upload_at <= ?',
         [staleTime]
       );
-      if (incompleteFiles.length > 0) {
-        for (const file of incompleteFiles) {
+      if (incompleteFiles[0].length > 0) {
+        for (const file of incompleteFiles[0] as FileId[]) {
           const file_id = file.id;
-          const [parts]: any[] = await connection.query('SELECT discord_message_id FROM file_parts WHERE file_id = ?', [file_id]);
+          const [parts]: [RowDataPacket[], unknown] = await connection.query('SELECT discord_message_id FROM file_parts WHERE file_id = ?', [file_id]);
-          if (parts.length > 0) {
-            for (const part of parts) {
+          if (parts[0].length > 0) {
+            for (const part of parts[0] as FilePart[]) {
               if (!part.discord_message_id) continue;
-              const deleteUrl = `https://discord.com/api/v10/channels/${process.env.DISCORD_CHANNEL_ID}/messages/${part.discord_message_id}`;
               try {
-                const res = await fetch(deleteUrl, {
-                  method: 'DELETE',
-                  headers: { 'Authorization': `Bot ${process.env.DISCORD_BOT_TOKEN}` },
-                });
+                const res: Response = await rateLimiter.request(
+                  `/channels/${process.env.DISCORD_CHANNEL_ID}/messages/${part.discord_message_id}`,
+                  {
+                    method: 'DELETE',
+                  }
+                );
                 if (!res.ok && res.status !== 404) {
                   console.warn(`Cleanup: Failed to delete message ${part.discord_message_id}. Status: ${res.status}`);
                 }
               } catch (e) {
                 console.error(`Cleanup: Error deleting message ${part.discord_message_id}:`, e);
               }
-              await new Promise(resolve => setTimeout(resolve, 1000)); // 1s delay
             }
           }
@@ -58,35 +72,35 @@ export async function POST(request: Request) {
       }
 
       // --- 2. Cleanup for EXPIRED files ---
-      const [expiredFiles]: any[] = await connection.query(
+      const [expiredFiles]: [RowDataPacket[], unknown] = await connection.query(
         'SELECT id FROM files WHERE expires_at <= NOW() AND deleted = 0'
       );
-      if (expiredFiles.length > 0) {
-        for (const file of expiredFiles) {
+      if (expiredFiles[0].length > 0) {
+        for (const file of expiredFiles[0] as FileId[]) {
           const file_id = file.id;
-          const [partsRows]: any[] = await connection.query(
+          const [partsRows]: [RowDataPacket[], unknown] = await connection.query(
             'SELECT discord_message_id FROM file_parts WHERE file_id = ?',
             [file_id]
           );
-          if (partsRows.length > 0) {
-            for (const part of partsRows) {
+          if (partsRows[0].length > 0) {
+            for (const part of partsRows[0] as FilePart[]) {
               if (!part.discord_message_id) continue;
-              const deleteUrl = `https://discord.com/api/v10/channels/${process.env.DISCORD_CHANNEL_ID}/messages/${part.discord_message_id}`;
               try {
-                const res = await fetch(deleteUrl, {
-                  method: 'DELETE',
-                  headers: { 'Authorization': `Bot ${process.env.DISCORD_BOT_TOKEN}` },
-                });
+                const res: Response = await rateLimiter.request(
+                  `/channels/${process.env.DISCORD_CHANNEL_ID}/messages/${part.discord_message_id}`,
+                  {
+                    method: 'DELETE',
+                  }
+                );
                 if (!res.ok && res.status !== 404) {
                   console.warn(`Cleanup: Failed to delete message ${part.discord_message_id}. Status: ${res.status}`);
                 }
               } catch (e) {
                 console.error(`Cleanup: Error deleting message ${part.discord_message_id}:`, e);
               }
-              await new Promise(resolve => setTimeout(resolve, 1000)); // 1s delay
             }
           }

View File

@@ -1,11 +1,25 @@
 import { NextResponse } from 'next/server';
 import pool from '@/lib/db';
 import bcrypt from 'bcrypt';
+import { RowDataPacket } from 'mysql2/promise';
+import { DiscordRateLimiter } from '@/lib/discordRateLimiter';
+
+const DISCORD_API_BASE_URL = 'https://discord.com/api/v10'; // Or the appropriate version
+
+interface FileData extends RowDataPacket {
+  token_hash: string;
+}
+
+interface FilePart extends RowDataPacket {
+  discord_message_id: string | null;
+}
 
 if (!process.env.DISCORD_BOT_TOKEN || !process.env.DISCORD_CHANNEL_ID) {
   throw new Error('Discord bot token or channel ID is not configured');
 }
 
+const rateLimiter = new DiscordRateLimiter(DISCORD_API_BASE_URL, process.env.DISCORD_BOT_TOKEN);
+
 export async function POST(request: Request) {
   try {
     const { file_id, token } = await request.json();
@@ -17,8 +31,8 @@ export async function POST(request: Request) {
     const connection = await pool.getConnection();
     try {
       // 1. Fetch file and validate token
-      const [fileRows]: any[] = await connection.query('SELECT * FROM files WHERE id = ? AND deleted = 0', [file_id]);
-      const file = fileRows[0];
+      const [fileRows]: [RowDataPacket[], unknown] = await connection.query('SELECT * FROM files WHERE id = ? AND deleted = 0', [file_id]);
+      const file = fileRows[0] as FileData;
 
       if (!file) {
         return NextResponse.json({ error: 'File not found' }, { status: 404 });
@@ -30,31 +44,27 @@ export async function POST(request: Request) {
     }
 
     // 2. Fetch all message IDs for the file parts
-    const [partsRows]: any[] = await connection.query(
+    const [partsRows]: [RowDataPacket[], unknown] = await connection.query(
       'SELECT discord_message_id FROM file_parts WHERE file_id = ?',
       [file_id]
     );
 
     // 3. Delete each message from Discord sequentially to avoid rate limits
-    for (const part of partsRows) {
+    for (const part of partsRows as FilePart[]) {
       if (!part.discord_message_id) continue;
-      const deleteUrl = `https://discord.com/api/v10/channels/${process.env.DISCORD_CHANNEL_ID}/messages/${part.discord_message_id}`;
       try {
-        const res = await fetch(deleteUrl, {
-          method: 'DELETE',
-          headers: { 'Authorization': `Bot ${process.env.DISCORD_BOT_TOKEN}` },
-        });
-        if (!res.ok && res.status !== 404) { // Don't warn on 404 (already deleted)
-          console.warn(`Failed to delete message ${part.discord_message_id}. Status: ${res.status}`);
-        }
+        await rateLimiter.request(
+          `/channels/${process.env.DISCORD_CHANNEL_ID}/messages/${part.discord_message_id}`,
+          {
+            method: 'DELETE',
+          }
+        );
+        // No need to check res.ok or res.status here, as rateLimiter.request throws on !response.ok
+        // and returns null for 204.
       } catch (e) {
         console.error(`Error deleting message ${part.discord_message_id}:`, e);
       }
-      // Wait 1 second between delete requests to avoid hitting Discord's rate limits.
-      await new Promise(resolve => setTimeout(resolve, 1000));
     }
 
     // 4. Hard delete the file from the database

View File

@@ -1,15 +1,40 @@
-import { NextResponse } from 'next/server';
+import { NextResponse, NextRequest } from 'next/server';
 import pool from '@/lib/db';
+import { RowDataPacket } from 'mysql2/promise';
+import { DiscordRateLimiter } from '@/lib/discordRateLimiter';
+
+const DISCORD_API_BASE_URL = 'https://discord.com/api/v10'; // Or the appropriate version
+
+interface FilePart extends RowDataPacket {
+  discord_message_id: string | null;
+}
+
+interface RouteContext {
+  params: Promise<{
+    file_id: string;
+    part_index: string;
+  }>;
+}
+
+interface DiscordAttachment {
+  url: string;
+  filename: string;
+  size: number;
+}
+
+interface DiscordMessageData {
+  attachments: DiscordAttachment[];
+}
 
 if (!process.env.DISCORD_BOT_TOKEN || !process.env.DISCORD_CHANNEL_ID) {
   throw new Error('Discord bot token or channel ID is not configured');
 }
 
-export async function GET(request: Request, context: any) {
+const rateLimiter = new DiscordRateLimiter(DISCORD_API_BASE_URL, process.env.DISCORD_BOT_TOKEN);
+
+export async function GET(request: NextRequest, context: RouteContext) {
   try {
-    const params = await context.params;
-    const file_id = params.file_id as string;
-    const part_index = params.part_index as string;
+    const { file_id, part_index } = await context.params;
 
     // NOTE: In a real-world scenario, you MUST validate if the user has permission to download this part.
     // This would involve checking the download_token against the hash in the `files` table.
@@ -19,11 +44,11 @@ export async function GET(request: Request, context: any) {
     const connection = await pool.getConnection();
     let part;
     try {
-      const [partsRows]: any[] = await connection.query(
+      const [partsRows]: [RowDataPacket[], unknown] = await connection.query(
         'SELECT discord_message_id FROM file_parts WHERE file_id = ? AND part_index = ?',
         [file_id, part_index]
       );
-      part = partsRows[0];
+      part = partsRows[0] as FilePart;
     } finally {
       connection.release();
     }
@@ -33,18 +58,10 @@ export async function GET(request: Request, context: any) {
     }
 
     // 1. Get fresh message data from Discord
-    const messageUrl = `https://discord.com/api/v10/channels/${process.env.DISCORD_CHANNEL_ID}/messages/${part.discord_message_id}`;
-    const discordRes = await fetch(messageUrl, {
-      headers: {
-        'Authorization': `Bot ${process.env.DISCORD_BOT_TOKEN}`,
-      }
-    });
-
-    if (!discordRes.ok) {
-      return NextResponse.json({ error: 'Failed to fetch message from Discord' }, { status: 500 });
-    }
-
-    const messageData = await discordRes.json();
+    const messageData: DiscordMessageData = await rateLimiter.request<DiscordMessageData>(
+      `/channels/${process.env.DISCORD_CHANNEL_ID}/messages/${part.discord_message_id}`
+    );
 
     const attachment = messageData.attachments[0];
     if (!attachment || !attachment.url) {

View File

@@ -1,11 +1,30 @@
-import { NextResponse } from 'next/server';
+import { NextResponse, NextRequest } from 'next/server';
 import pool from '@/lib/db';
 import bcrypt from 'bcrypt';
+import { RowDataPacket } from 'mysql2/promise';
+
+interface FileData extends RowDataPacket {
+  filename: string;
+  size: number;
+  num_parts: number;
+  expires_at: Date | null;
+  token_hash: string;
+}
+
+interface FilePartMetadata extends RowDataPacket {
+  part_index: number;
+  size: number;
+}
+
+interface RouteContext {
+  params: Promise<{
+    file_id: string;
+  }>;
+}
 
-export async function GET(request: Request, context: any) {
+export async function GET(request: NextRequest, context: RouteContext) {
   try {
-    const params = await context.params;
-    const file_id = params.file_id as string;
+    const { file_id } = await context.params;
 
     const { searchParams } = new URL(request.url);
     const token = searchParams.get('token');
@@ -16,8 +35,8 @@ export async function GET(request: Request, context: any) {
     const connection = await pool.getConnection();
     try {
       // 1. Fetch file metadata and token hash
-      const [fileRows]: any[] = await connection.query('SELECT * FROM files WHERE id = ? AND deleted = 0', [file_id]);
-      const file = fileRows[0];
+      const [fileRows]: [RowDataPacket[], unknown] = await connection.query('SELECT * FROM files WHERE id = ? AND deleted = 0', [file_id]);
+      const file = fileRows[0] as FileData;
 
       if (!file) {
         return NextResponse.json({ error: 'File not found' }, { status: 404 });
@@ -35,7 +54,7 @@ export async function GET(request: Request, context: any) {
       }
 
       // 4. Fetch file parts
-      const [partsRows]: any[] = await connection.query(
+      const [partsRows]: [RowDataPacket[], unknown] = await connection.query(
         'SELECT part_index, size FROM file_parts WHERE file_id = ? ORDER BY part_index ASC',
         [file_id]
       );
@@ -45,7 +64,7 @@ export async function GET(request: Request, context: any) {
         filename: file.filename,
         size: file.size,
         num_parts: file.num_parts,
-        parts: partsRows,
+        parts: partsRows as FilePartMetadata[],
       };
 
       return NextResponse.json(response);

View File

@@ -1,11 +1,23 @@
 import { NextResponse } from 'next/server';
 import pool from '@/lib/db';
+import { DiscordRateLimiter } from '@/lib/discordRateLimiter';
+
+const DISCORD_API_BASE_URL = 'https://discord.com/api/v10'; // Or the appropriate version
 
 if (!process.env.DISCORD_BOT_TOKEN || !process.env.DISCORD_CHANNEL_ID) {
   throw new Error('Please define DISCORD_BOT_TOKEN and DISCORD_CHANNEL_ID in .env.local');
 }
 
-const DISCORD_API_URL = `https://discord.com/api/v10/channels/${process.env.DISCORD_CHANNEL_ID}/messages`;
+const rateLimiter = new DiscordRateLimiter(DISCORD_API_BASE_URL, process.env.DISCORD_BOT_TOKEN);
+
+interface DiscordMessageResponse {
+  id: string;
+  attachments: Array<{
+    id: string;
+    url: string;
+    size: number;
+  }>;
+}
 
 export async function POST(request: Request) {
   try {
@@ -22,21 +34,13 @@ export async function POST(request: Request) {
     const discordFormData = new FormData();
     discordFormData.append('file', chunk, `chunk-${partIndex}.bin`);
 
-    const discordRes = await fetch(DISCORD_API_URL, {
-      method: 'POST',
-      headers: {
-        'Authorization': `Bot ${process.env.DISCORD_BOT_TOKEN}`,
-      },
-      body: discordFormData,
-    });
-
-    if (!discordRes.ok) {
-      const errorBody = await discordRes.json();
-      console.error('Discord API Error:', errorBody);
-      return NextResponse.json({ error: 'Failed to upload to Discord' }, { status: 500 });
-    }
-
-    const discordData = await discordRes.json();
+    const discordData = await rateLimiter.request<DiscordMessageResponse>(
+      `/channels/${process.env.DISCORD_CHANNEL_ID}/messages`,
+      {
+        method: 'POST',
+        body: discordFormData,
+      }
+    );
 
     const attachment = discordData.attachments[0];
     if (!attachment) {

View File

@@ -3,6 +3,11 @@
 import { useEffect, useState, useRef } from 'react';
 import { useParams, useSearchParams } from 'next/navigation';
 
+interface MetadataPart {
+  part_index: number;
+  size: number;
+}
+
 // Helper to convert Base64 string back to ArrayBuffer
 function base64ToBuffer(base64: string) {
   const binary_string = window.atob(base64.replace(/-/g, '+').replace(/_/g, '/'));
@@ -95,18 +100,29 @@ export default function DownloadPage() {
     try {
       const metaRes = await fetch(`/api/file/${file_id}?token=${token}`);
 
-      if (!metaRes.ok) {
-        const err = await metaRes.json();
-        throw new Error(err.error || 'Failed to fetch file metadata.');
-      }
-
-      const metadata = await metaRes.json();
+      let metadata;
+      try {
+        metadata = await metaRes.json();
+      } catch (jsonError) {
+        console.error('Failed to parse metadata response as JSON:', jsonError);
+        throw new Error('Failed to parse file metadata.');
+      }
+
+      if (!metaRes.ok) {
+        throw new Error(metadata.error || 'Failed to fetch file metadata.');
+      }
 
       setFilename(metadata.filename);
       setTotalBytes(metadata.size);
       setDownloadState('downloading');
 
       const encryptedParts = new Array(metadata.num_parts);
-      const downloadPromises = metadata.parts.map(async (part: any) => {
+
+      if (!Array.isArray(metadata.parts)) {
+        throw new Error('Invalid metadata: parts is not an array.');
+      }
+
+      const downloadPromises = metadata.parts.map(async (part: MetadataPart) => {
         const response = await fetchWithRetry(`/api/download-part/${file_id}/${part.part_index}`);
         const buffer = await response.arrayBuffer();
         encryptedParts[part.part_index] = { index: part.part_index, data: buffer };
@@ -127,7 +143,7 @@ export default function DownloadPage() {
         ['decrypt']
       );
 
-      const decryptPromises = encryptedParts.map(async (part) => {
+      const decryptPromises = encryptedParts.map(async (part: { index: number; data: ArrayBuffer }) => {
         const iv = part.data.slice(0, 12);
         const data = part.data.slice(12);
         const decrypted = await window.crypto.subtle.decrypt({ name: 'AES-GCM', iv }, fileKey, data);
@@ -148,9 +164,8 @@ export default function DownloadPage() {
       setDownloadState('complete');
-    } catch (e: any) {
-      setError(e.message || 'An unknown error occurred.');
-      setDownloadState('error');
+    } catch (e: unknown) {
+      setError(e instanceof Error ? e.message : 'An unknown error occurred.'); setDownloadState('error');
     }
   };
@@ -181,9 +196,9 @@ export default function DownloadPage() {
       setDeleteState('deleted');
-    } catch (e: any) {
+    } catch (e: unknown) {
       setDeleteState('error');
-      setDeleteError(e.message || 'An unknown error occurred during deletion.');
+      setDeleteError(e instanceof Error ? e.message : 'An unknown error occurred during deletion.');
     }
   };
@@ -243,10 +258,10 @@ export default function DownloadPage() {
             {deleteState === 'idle' && (
               <button
                 onClick={handleDelete}
-                disabled={downloadState === 'downloading' || downloadState === 'decrypting' || deleteState === 'deleting'}
+                disabled={downloadState === 'downloading' || downloadState === 'decrypting'}
                 className="w-full bg-red-600 hover:bg-red-700 text-white font-bold py-3 px-4 rounded-lg transition duration-300 disabled:bg-gray-600 disabled:cursor-not-allowed"
               >
-                {deleteState === 'deleting' ? 'Deleting...' : 'Delete File'}
+                {'Delete File'}
               </button>
             )}
             {deleteState === 'deleting' && (
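
Note: the download hunks above call a fetchWithRetry helper that is not part of this compare. A minimal sketch of what such a helper might look like, an assumption rather than the repository's implementation:

// Assumed shape of the `fetchWithRetry` helper referenced above (not shown in this diff):
// retry transient failures with a growing backoff, throw once all attempts are exhausted.
async function fetchWithRetry(url: string, attempts = 3, backoffMs = 500): Promise<Response> {
  let lastError: unknown;
  for (let i = 0; i < attempts; i++) {
    try {
      const res = await fetch(url);
      if (res.ok) return res;
      lastError = new Error(`Request failed with status ${res.status}`);
    } catch (e) {
      lastError = e; // network error, retry
    }
    await new Promise(resolve => setTimeout(resolve, backoffMs * (i + 1)));
  }
  throw lastError instanceof Error ? lastError : new Error('fetchWithRetry: all attempts failed');
}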

Binary file not shown.

Before: 25 KiB, After: 43 KiB

View File

@@ -1,6 +1,7 @@
 import type { Metadata } from "next";
 import { Geist, Geist_Mono } from "next/font/google";
 import "./globals.css";
+import { Analytics } from "@vercel/analytics/next"
 
 const geistSans = Geist({
   variable: "--font-geist-sans",
@@ -13,8 +14,8 @@ const geistMono = Geist_Mono({
 });
 
 export const metadata: Metadata = {
-  title: "Create Next App",
-  description: "Generated by create next app",
+  title: "LockLoad",
+  description: "Securely upload and share files.",
 };
 
 export default function RootLayout({
@@ -24,11 +25,10 @@ export default function RootLayout({
 }>) {
   return (
     <html lang="en">
-      <body
-        className={`${geistSans.variable} ${geistMono.variable} antialiased`}
-      >
+      <body className={`${geistSans.variable} ${geistMono.variable} antialiased`}>
         {children}
       </body>
+      <Analytics />
     </html>
   );
 }

View File

@@ -2,8 +2,8 @@
 import { useState, useEffect, useRef } from 'react';
 
-const CHUNK_SIZE = 8 * 1024 * 1024; // 8 MB
-const PARALLEL_UPLOADS = 4;
+const CHUNK_SIZE = 4 * 1024 * 1024; // 4 MB
+const PARALLEL_UPLOADS = 5;
 
 function bufferToBase64(buffer: ArrayBuffer) {
   let binary = '';
@@ -154,7 +154,7 @@ export default function Home() {
     };
 
     const queue = [...chunks];
-    const active = [];
+    const active: Promise<void>[] = [];
 
     while (queue.length > 0 || active.length > 0) {
       while (active.length < PARALLEL_UPLOADS && queue.length > 0) {
         const task = queue.shift()!;
@@ -184,7 +184,7 @@ export default function Home() {
     <main className="flex min-h-screen flex-col items-center justify-center p-6 bg-gray-900 text-white font-sans">
       <div className="w-full max-w-xl">
         <div className="text-center mb-8">
-          <h1 className="text-4xl font-bold">Discord Storage</h1>
+          <h1 className="text-4xl font-bold">LockLoad</h1>
           <p className="text-lg text-gray-400">Upload large files, encrypted and stored on Discord. 7-day retention.</p>
         </div>
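
Note: only a few lines of the upload loop appear in the hunks above. A minimal sketch of the bounded-parallelism pattern that the typed active array supports; uploadChunk here is a hypothetical stand-in for the real per-chunk task, not code from this repository:

// Keep at most `parallel` chunk uploads in flight at once.
async function uploadAll(
  chunks: Blob[],
  uploadChunk: (index: number, chunk: Blob) => Promise<void>, // caller-supplied upload task
  parallel = 5,
): Promise<void> {
  const queue = [...chunks.entries()];
  const active: Promise<void>[] = [];

  while (queue.length > 0 || active.length > 0) {
    // Top up the pool until `parallel` uploads are in flight.
    while (active.length < parallel && queue.length > 0) {
      const [index, chunk] = queue.shift()!;
      const task: Promise<void> = uploadChunk(index, chunk).then(() => {
        // Drop the settled promise so the next chunk can start.
        active.splice(active.indexOf(task), 1);
      });
      active.push(task);
    }
    // Block until at least one in-flight upload settles before looping again.
    if (active.length > 0) await Promise.race(active);
  }
}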

View File

@@ -0,0 +1,53 @@
const sleep = (ms: number) => new Promise(resolve => setTimeout(resolve, ms));

export class DiscordRateLimiter {
  private readonly baseUrl: string;
  private readonly botToken: string;

  constructor(baseUrl: string, botToken: string) {
    this.baseUrl = baseUrl;
    this.botToken = botToken;
  }

  /**
   * Makes a request to the Discord API, handling rate limits automatically.
   * @param path The API endpoint path (e.g., '/channels/123/messages').
   * @param options Fetch options (method, headers, body, etc.).
   * @returns The JSON response from the Discord API.
   */
  public async request<T>(path: string, options: RequestInit = {}): Promise<T> {
    const url = `${this.baseUrl}${path}`;
    const headers: Record<string, string> = {
      'Authorization': `Bot ${this.botToken}`,
      ...(options.headers as Record<string, string>),
    };

    // Only set Content-Type to application/json if body is a string and not explicitly set
    if (typeof options.body === 'string' && !('Content-Type' in headers)) {
      headers['Content-Type'] = 'application/json';
    }

    const response = await fetch(url, { ...options, headers });

    if (response.status === 429) {
      const retryAfter = response.headers.get('Retry-After');
      const delay = retryAfter ? parseInt(retryAfter, 10) * 1000 : 1000; // Default to 1 second if header is missing
      console.warn(`Discord API rate limit hit. Retrying after ${delay / 1000} seconds.`);
      await sleep(delay);
      return this.request<T>(path, options); // Retry the request
    }

    if (!response.ok) {
      const errorData = await response.json().catch(() => ({ message: response.statusText }));
      throw new Error(`Discord API error: ${response.status} - ${errorData.message || JSON.stringify(errorData)}`);
    }

    if (response.status === 204) {
      return null as T; // Return null for 204 No Content
    }

    return response.json() as Promise<T>;
  }
}
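
Note: a hedged usage sketch of DiscordRateLimiter, mirroring how the routes in this compare call it; the interface shape and the message ID are placeholders for illustration only:

import { DiscordRateLimiter } from '@/lib/discordRateLimiter';

// Placeholder response shape; the real routes define their own interfaces.
interface DiscordMessage {
  id: string;
  attachments: Array<{ url: string; size: number }>;
}

const limiter = new DiscordRateLimiter('https://discord.com/api/v10', process.env.DISCORD_BOT_TOKEN!);

async function fetchMessage(messageId: string): Promise<DiscordMessage> {
  // 429s are retried inside request(); non-2xx responses throw; 204 resolves to null.
  return limiter.request<DiscordMessage>(
    `/channels/${process.env.DISCORD_CHANNEL_ID}/messages/${messageId}`
  );
}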