All work on icebox
This commit is contained in:
parent 19b077e6cf
commit 8919032849
@@ -1,3 +1,5 @@
node_modules
.env
watch/*
data/*
*.DS_Store
@@ -0,0 +1 @@
DATABASE_FILE=data/icebox.sqlite
@@ -0,0 +1,3 @@
export declare function envString(variable: string, defaultString: string): string;
export declare function envFloat(variable: string, defaultFloat: number): number;
export declare function envInt(variable: string, defaultInt: number): number;
@@ -0,0 +1,16 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.envString = envString;
exports.envFloat = envFloat;
exports.envInt = envInt;
function envString(variable, defaultString) {
    return typeof process.env[variable] !== 'undefined' ? process.env[variable] : defaultString;
}
function envFloat(variable, defaultFloat) {
    return typeof process.env[variable] !== 'undefined' ? parseFloat(process.env[variable]) : defaultFloat;
}
function envInt(variable, defaultInt) {
    return typeof process.env[variable] !== 'undefined' ? parseInt(process.env[variable]) : defaultInt;
}
module.exports = { envString, envFloat, envInt };
//# sourceMappingURL=index.js.map
@ -0,0 +1 @@
|
|||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/env/index.ts"],"names":[],"mappings":";;AAAA,8BAEC;AAED,4BAEC;AAED,wBAEC;AAVD,SAAgB,SAAS,CAAE,QAAiB,EAAE,aAAsB;IACnE,OAAO,OAAO,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,KAAK,WAAW,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC;AAC7F,CAAC;AAED,SAAgB,QAAQ,CAAE,QAAiB,EAAE,YAAqB;IACjE,OAAO,OAAO,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,KAAK,WAAW,CAAC,CAAC,CAAC,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC;AACxG,CAAC;AAED,SAAgB,MAAM,CAAE,QAAiB,EAAE,UAAmB;IAC7D,OAAO,OAAO,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,KAAK,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC;AACpG,CAAC;AAED,MAAM,CAAC,OAAO,GAAG,EAAE,SAAS,EAAE,QAAQ,EAAE,MAAM,EAAE,CAAC"}
@@ -0,0 +1,11 @@
interface FileInfo {
    success: boolean;
    hash?: string;
    size?: number;
    created?: number;
    error?: Error | undefined;
}
export declare class File {
    static info(filePath: string): Promise<FileInfo>;
}
export {};
@ -0,0 +1,56 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.File = void 0;
|
||||
const promises_1 = require("fs/promises");
|
||||
const size_1 = require("../size");
|
||||
const hash_1 = require("../hash");
|
||||
class File {
|
||||
static async info(filePath) {
|
||||
let success = false;
|
||||
let hash;
|
||||
let fileSize;
|
||||
let created;
|
||||
let stats;
|
||||
let error;
|
||||
try {
|
||||
hash = await hash_1.Hashes.file(filePath);
|
||||
}
|
||||
catch (err) {
|
||||
error = err;
|
||||
return {
|
||||
success,
|
||||
error
|
||||
};
|
||||
}
|
||||
try {
|
||||
fileSize = await (0, size_1.size)(filePath);
|
||||
}
|
||||
catch (err) {
|
||||
error = err;
|
||||
return {
|
||||
success,
|
||||
error
|
||||
};
|
||||
}
|
||||
try {
|
||||
stats = await (0, promises_1.stat)(filePath);
|
||||
created = stats.birthtimeMs;
|
||||
}
|
||||
catch (err) {
|
||||
error = err;
|
||||
return {
|
||||
success,
|
||||
error
|
||||
};
|
||||
}
|
||||
success = true;
|
||||
return {
|
||||
success,
|
||||
hash,
|
||||
size: fileSize,
|
||||
created
|
||||
};
|
||||
}
|
||||
}
|
||||
exports.File = File;
|
||||
//# sourceMappingURL=index.js.map
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/file/index.ts"],"names":[],"mappings":";;;AAAA,0CAAmC;AAGnC,kCAA+B;AAC/B,kCAAiC;AAUjC,MAAa,IAAI;IACT,MAAM,CAAC,KAAK,CAAC,IAAI,CAAE,QAAiB;QAC1C,IAAI,OAAO,GAAa,KAAK,CAAC;QAC9B,IAAI,IAAa,CAAC;QAClB,IAAI,QAAiB,CAAC;QACtB,IAAI,OAAgB,CAAC;QACrB,IAAI,KAAa,CAAC;QAClB,IAAI,KAAa,CAAC;QAElB,IAAI,CAAC;YACJ,IAAI,GAAG,MAAM,aAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QACpC,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACd,KAAK,GAAG,GAAY,CAAC;YACrB,OAAO;gBACN,OAAO;gBACP,KAAK;aACL,CAAC;QACH,CAAC;QAED,IAAI,CAAC;YACJ,QAAQ,GAAG,MAAM,IAAA,WAAI,EAAC,QAAQ,CAAC,CAAA;QAChC,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACd,KAAK,GAAG,GAAY,CAAC;YACrB,OAAO;gBACN,OAAO;gBACP,KAAK;aACL,CAAC;QACH,CAAC;QAED,IAAI,CAAC;YACJ,KAAK,GAAG,MAAM,IAAA,eAAI,EAAC,QAAQ,CAAC,CAAC;YAC7B,OAAO,GAAG,KAAK,CAAC,WAAW,CAAC;QAC7B,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACd,KAAK,GAAG,GAAY,CAAC;YACrB,OAAO;gBACN,OAAO;gBACP,KAAK;aACL,CAAC;QACH,CAAC;QAED,OAAO,GAAG,IAAI,CAAC;QACf,OAAO;YACN,OAAO;YACP,IAAI;YACJ,IAAI,EAAG,QAAQ;YACf,OAAO;SACP,CAAC;IACH,CAAC;CACD;AAhDD,oBAgDC"}
|
|
@@ -0,0 +1,4 @@
export declare class Hashes {
    static file(path: string): Promise<string>;
    static string(str: string): string;
}
@@ -0,0 +1,23 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Hashes = void 0;
const crypto_1 = require("crypto");
const fs_1 = require("fs");
class Hashes {
    static async file(path) {
        return new Promise((resolve, reject) => {
            const hashSum = (0, crypto_1.createHash)('sha256');
            const stream = (0, fs_1.createReadStream)(path);
            stream.on('error', (err) => reject(err));
            stream.on('data', (chunk) => hashSum.update(chunk));
            stream.on('end', () => resolve(hashSum.digest('hex')));
        });
    }
    static string(str) {
        const sha = (0, crypto_1.createHash)('sha256').update(str);
        return sha.digest('hex');
    }
}
exports.Hashes = Hashes;
module.exports = { Hashes };
//# sourceMappingURL=index.js.map
@ -0,0 +1 @@
|
|||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/hash/index.ts"],"names":[],"mappings":";;;AAAA,mCAA0C;AAC1C,2BAAsC;AAEtC,MAAa,MAAM;IACX,MAAM,CAAC,KAAK,CAAC,IAAI,CAAE,IAAa;QACtC,OAAO,IAAI,OAAO,CAAC,CAAC,OAAkB,EAAE,MAAiB,EAAE,EAAE;YAC5D,MAAM,OAAO,GAAU,IAAA,mBAAU,EAAC,QAAQ,CAAC,CAAC;YAC5C,MAAM,MAAM,GAAS,IAAA,qBAAgB,EAAC,IAAI,CAAC,CAAC;YAC5C,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAW,EAAE,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;YAC9C,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,KAAc,EAAE,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;YAC7D,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QAC3D,CAAC,CAAC,CAAC;IACJ,CAAC;IAEM,MAAM,CAAC,MAAM,CAAE,GAAY;QACjC,MAAM,GAAG,GAAU,IAAA,mBAAU,EAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;QACpD,OAAO,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;IAC1B,CAAC;CACD;AAfD,wBAeC;AAED,MAAM,CAAC,OAAO,GAAG,EAAE,MAAM,EAAE,CAAC"}
|
|
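As a rough illustration of how the Hashes class above is consumed elsewhere in this commit (for example by File.info), a short hedged sketch follows; the file path and string are placeholders, not values from the commit.

// Hypothetical usage of src/hash (the path below is a placeholder).
import { Hashes } from './hash';

async function example(): Promise<void> {
    // Streams the file through a SHA-256 digest and resolves with the hex string.
    const fileHash: string = await Hashes.file('data/example.bin');
    // Hashes an in-memory string with the same algorithm.
    const stringHash: string = Hashes.string('hello icebox');
    console.log(fileHash, stringHash);
}

example().catch(console.error);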
@ -0,0 +1 @@
|
|||
export {};
|
|
@ -1,6 +1,79 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const express_1 = __importDefault(require("express"));
|
||||
const handlebars = __importStar(require("express-handlebars"));
|
||||
const body_parser_1 = __importDefault(require("body-parser"));
|
||||
const path_1 = require("path");
|
||||
const env_1 = require("./env");
|
||||
const log_1 = require("./log");
|
||||
const PORT = (0, env_1.envInt)('PORT', 3835);
|
||||
const viewsPath = (0, path_1.join)(__dirname, '../views');
|
||||
const staticPath = (0, path_1.join)(__dirname, '../static');
|
||||
const app = (0, express_1.default)();
|
||||
const log = (0, log_1.createLog)('icebox');
|
||||
const hbs = handlebars.create({
|
||||
defaultLayout: 'main',
|
||||
layoutsDir: (0, path_1.join)(viewsPath, './layouts'),
|
||||
partialsDir: (0, path_1.join)(viewsPath, '/partials')
|
||||
});
|
||||
app.use(express_1.default.static(staticPath));
|
||||
app.use(body_parser_1.default.urlencoded({ extended: true }));
|
||||
app.engine('handlebars', hbs.engine);
|
||||
app.set('view engine', 'handlebars');
|
||||
app.set('views', viewsPath);
|
||||
app.get('/', (req, res, next) => {
|
||||
const data = {};
|
||||
return res.render('home', data);
|
||||
});
|
||||
app.post('/lookup', (req, res, next) => {
|
||||
const data = {
|
||||
alert: {
|
||||
class: 'primary',
|
||||
message: 'Check your email. If one or more active download links are found they will be emailed to you.'
|
||||
}
|
||||
};
|
||||
if (typeof req.body !== 'undefined') {
|
||||
console.dir(req.body);
|
||||
}
|
||||
return res.render('home', data);
|
||||
});
|
||||
/*
|
||||
import express from 'express';
|
||||
import http from 'http';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
|
@ -9,13 +82,13 @@ import { RTCPeerConnection, RTCSessionDescription, RTCIceCandidate } from 'wrtc'
|
|||
|
||||
// Define types
|
||||
interface FileTransferSession {
|
||||
peerConnection: RTCPeerConnection;
|
||||
dataChannel?: RTCDataChannel;
|
||||
fileStream?: fs.ReadStream;
|
||||
filePath: string;
|
||||
fileSize: number;
|
||||
chunkSize: number;
|
||||
sentBytes: number;
|
||||
peerConnection: RTCPeerConnection;
|
||||
dataChannel?: RTCDataChannel;
|
||||
fileStream?: fs.ReadStream;
|
||||
filePath: string;
|
||||
fileSize: number;
|
||||
chunkSize: number;
|
||||
sentBytes: number;
|
||||
}
|
||||
|
||||
// Initialize express app
|
||||
|
@ -32,241 +105,232 @@ const sessions: Map<string, FileTransferSession> = new Map();
|
|||
|
||||
// Configure WebRTC ICE servers (STUN/TURN)
|
||||
const iceServers = [
|
||||
{ urls: 'stun:stun.l.google.com:19302' },
|
||||
{ urls: 'stun:stun1.l.google.com:19302' }
|
||||
{ urls: 'stun:stun.l.google.com:19302' },
|
||||
{ urls: 'stun:stun1.l.google.com:19302' }
|
||||
];
|
||||
|
||||
io.on('connection', (socket) => {
|
||||
console.log('Client connected:', socket.id);
|
||||
console.log('Client connected:', socket.id);
|
||||
|
||||
// Handle request for available files
|
||||
socket.on('get-files', () => {
|
||||
const filesDirectory = path.join(__dirname, 'files');
|
||||
try {
|
||||
const files = fs.readdirSync(filesDirectory)
|
||||
.filter(file => fs.statSync(path.join(filesDirectory, file)).isFile())
|
||||
.map(file => {
|
||||
const filePath = path.join(filesDirectory, file);
|
||||
const stats = fs.statSync(filePath);
|
||||
return {
|
||||
name: file,
|
||||
size: stats.size,
|
||||
modified: stats.mtime
|
||||
};
|
||||
// Handle request for available files
|
||||
socket.on('get-files', () => {
|
||||
const filesDirectory = path.join(__dirname, 'files');
|
||||
try {
|
||||
const files = fs.readdirSync(filesDirectory)
|
||||
.filter(file => fs.statSync(path.join(filesDirectory, file)).isFile())
|
||||
.map(file => {
|
||||
const filePath = path.join(filesDirectory, file);
|
||||
const stats = fs.statSync(filePath);
|
||||
return {
|
||||
name: file,
|
||||
size: stats.size,
|
||||
modified: stats.mtime
|
||||
};
|
||||
});
|
||||
socket.emit('files-list', files);
|
||||
} catch (err) {
|
||||
console.error('Error reading files directory:', err);
|
||||
socket.emit('error', 'Failed to retrieve files list');
|
||||
}
|
||||
});
|
||||
|
||||
// Handle file transfer request
|
||||
socket.on('request-file', (fileName: string) => {
|
||||
const filePath = path.join(__dirname, 'files', fileName);
|
||||
|
||||
// Check if file exists
|
||||
if (!fs.existsSync(filePath)) {
|
||||
return socket.emit('error', 'File not found');
|
||||
}
|
||||
|
||||
const fileSize = fs.statSync(filePath).size;
|
||||
const chunkSize = 16384; // 16KB chunks
|
||||
|
||||
// Create and configure peer connection
|
||||
const peerConnection = new RTCPeerConnection({ iceServers });
|
||||
|
||||
// Create data channel
|
||||
const dataChannel = peerConnection.createDataChannel('fileTransfer', {
|
||||
ordered: true
|
||||
});
|
||||
socket.emit('files-list', files);
|
||||
} catch (err) {
|
||||
console.error('Error reading files directory:', err);
|
||||
socket.emit('error', 'Failed to retrieve files list');
|
||||
}
|
||||
});
|
||||
|
||||
// Handle file transfer request
|
||||
socket.on('request-file', (fileName: string) => {
|
||||
const filePath = path.join(__dirname, 'files', fileName);
|
||||
|
||||
// Check if file exists
|
||||
if (!fs.existsSync(filePath)) {
|
||||
return socket.emit('error', 'File not found');
|
||||
}
|
||||
|
||||
const fileSize = fs.statSync(filePath).size;
|
||||
const chunkSize = 16384; // 16KB chunks
|
||||
|
||||
// Create and configure peer connection
|
||||
const peerConnection = new RTCPeerConnection({ iceServers });
|
||||
|
||||
// Create data channel
|
||||
const dataChannel = peerConnection.createDataChannel('fileTransfer', {
|
||||
ordered: true
|
||||
|
||||
// Store session info
|
||||
sessions.set(socket.id, {
|
||||
peerConnection,
|
||||
dataChannel,
|
||||
filePath,
|
||||
fileSize,
|
||||
chunkSize,
|
||||
sentBytes: 0
|
||||
});
|
||||
|
||||
// Handle ICE candidates
|
||||
peerConnection.onicecandidate = (event) => {
|
||||
if (event.candidate) {
|
||||
socket.emit('ice-candidate', event.candidate);
|
||||
}
|
||||
};
|
||||
|
||||
// Set up data channel handlers
|
||||
dataChannel.onopen = () => {
|
||||
console.log(`Data channel opened for client ${socket.id}`);
|
||||
startFileTransfer(socket.id);
|
||||
};
|
||||
|
||||
dataChannel.onclose = () => {
|
||||
console.log(`Data channel closed for client ${socket.id}`);
|
||||
cleanupSession(socket.id);
|
||||
};
|
||||
|
||||
dataChannel.onerror = (error) => {
|
||||
console.error(`Data channel error for client ${socket.id}:`, error);
|
||||
cleanupSession(socket.id);
|
||||
};
|
||||
|
||||
// Create offer
|
||||
peerConnection.createOffer()
|
||||
.then(offer => peerConnection.setLocalDescription(offer))
|
||||
.then(() => {
|
||||
socket.emit('offer', {
|
||||
sdp: peerConnection.localDescription,
|
||||
fileInfo: {
|
||||
name: path.basename(filePath),
|
||||
size: fileSize
|
||||
}
|
||||
});
|
||||
})
|
||||
.catch(err => {
|
||||
console.error('Error creating offer:', err);
|
||||
socket.emit('error', 'Failed to create connection offer');
|
||||
cleanupSession(socket.id);
|
||||
});
|
||||
});
|
||||
|
||||
// Store session info
|
||||
sessions.set(socket.id, {
|
||||
peerConnection,
|
||||
dataChannel,
|
||||
filePath,
|
||||
fileSize,
|
||||
chunkSize,
|
||||
sentBytes: 0
|
||||
// Handle answer from browser
|
||||
socket.on('answer', async (answer: RTCSessionDescription) => {
|
||||
try {
|
||||
const session = sessions.get(socket.id);
|
||||
if (!session) return;
|
||||
|
||||
await session.peerConnection.setRemoteDescription(new RTCSessionDescription(answer));
|
||||
console.log(`Connection established with client ${socket.id}`);
|
||||
} catch (err) {
|
||||
console.error('Error setting remote description:', err);
|
||||
socket.emit('error', 'Failed to establish connection');
|
||||
cleanupSession(socket.id);
|
||||
}
|
||||
});
|
||||
|
||||
// Handle ICE candidates
|
||||
peerConnection.onicecandidate = (event) => {
|
||||
if (event.candidate) {
|
||||
socket.emit('ice-candidate', event.candidate);
|
||||
}
|
||||
};
|
||||
// Handle ICE candidates from browser
|
||||
socket.on('ice-candidate', async (candidate: RTCIceCandidate) => {
|
||||
try {
|
||||
const session = sessions.get(socket.id);
|
||||
if (!session) return;
|
||||
|
||||
await session.peerConnection.addIceCandidate(new RTCIceCandidate(candidate));
|
||||
} catch (err) {
|
||||
console.error('Error adding ICE candidate:', err);
|
||||
}
|
||||
});
|
||||
|
||||
// Set up data channel handlers
|
||||
dataChannel.onopen = () => {
|
||||
console.log(`Data channel opened for client ${socket.id}`);
|
||||
startFileTransfer(socket.id);
|
||||
};
|
||||
|
||||
dataChannel.onclose = () => {
|
||||
console.log(`Data channel closed for client ${socket.id}`);
|
||||
cleanupSession(socket.id);
|
||||
};
|
||||
|
||||
dataChannel.onerror = (error) => {
|
||||
console.error(`Data channel error for client ${socket.id}:`, error);
|
||||
cleanupSession(socket.id);
|
||||
};
|
||||
|
||||
// Create offer
|
||||
peerConnection.createOffer()
|
||||
.then(offer => peerConnection.setLocalDescription(offer))
|
||||
.then(() => {
|
||||
socket.emit('offer', {
|
||||
sdp: peerConnection.localDescription,
|
||||
fileInfo: {
|
||||
name: path.basename(filePath),
|
||||
size: fileSize
|
||||
}
|
||||
});
|
||||
})
|
||||
.catch(err => {
|
||||
console.error('Error creating offer:', err);
|
||||
socket.emit('error', 'Failed to create connection offer');
|
||||
// Handle client disconnection
|
||||
socket.on('disconnect', () => {
|
||||
console.log('Client disconnected:', socket.id);
|
||||
cleanupSession(socket.id);
|
||||
});
|
||||
});
|
||||
|
||||
// Handle answer from browser
|
||||
socket.on('answer', async (answer: RTCSessionDescription) => {
|
||||
try {
|
||||
const session = sessions.get(socket.id);
|
||||
if (!session) return;
|
||||
|
||||
await session.peerConnection.setRemoteDescription(new RTCSessionDescription(answer));
|
||||
console.log(`Connection established with client ${socket.id}`);
|
||||
} catch (err) {
|
||||
console.error('Error setting remote description:', err);
|
||||
socket.emit('error', 'Failed to establish connection');
|
||||
cleanupSession(socket.id);
|
||||
}
|
||||
});
|
||||
|
||||
// Handle ICE candidates from browser
|
||||
socket.on('ice-candidate', async (candidate: RTCIceCandidate) => {
|
||||
try {
|
||||
const session = sessions.get(socket.id);
|
||||
if (!session) return;
|
||||
|
||||
await session.peerConnection.addIceCandidate(new RTCIceCandidate(candidate));
|
||||
} catch (err) {
|
||||
console.error('Error adding ICE candidate:', err);
|
||||
}
|
||||
});
|
||||
|
||||
// Handle client disconnection
|
||||
socket.on('disconnect', () => {
|
||||
console.log('Client disconnected:', socket.id);
|
||||
cleanupSession(socket.id);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// Start file transfer
|
||||
function startFileTransfer(socketId: string): void {
|
||||
const session = sessions.get(socketId);
|
||||
if (!session || !session.dataChannel) return;
|
||||
|
||||
// Send file info first
|
||||
session.dataChannel.send(JSON.stringify({
|
||||
type: 'file-info',
|
||||
name: path.basename(session.filePath),
|
||||
size: session.fileSize
|
||||
}));
|
||||
|
||||
// Open file stream
|
||||
session.fileStream = fs.createReadStream(session.filePath, {
|
||||
highWaterMark: session.chunkSize
|
||||
});
|
||||
|
||||
// Process file chunks
|
||||
session.fileStream.on('data', (chunk: Buffer) => {
|
||||
// Check if data channel is still open and ready
|
||||
if (session.dataChannel?.readyState === 'open') {
|
||||
// Pause the stream to handle backpressure
|
||||
session.fileStream?.pause();
|
||||
|
||||
// Send chunk as ArrayBuffer
|
||||
session.dataChannel.send(chunk);
|
||||
session.sentBytes += chunk.length;
|
||||
|
||||
// Report progress
|
||||
if (session.sentBytes % (5 * 1024 * 1024) === 0) { // Every 5MB
|
||||
console.log(`Sent ${session.sentBytes / (1024 * 1024)}MB of ${session.fileSize / (1024 * 1024)}MB`);
|
||||
}
|
||||
|
||||
// Check buffer status before resuming
|
||||
const bufferAmount = session.dataChannel.bufferedAmount;
|
||||
if (bufferAmount < session.chunkSize * 2) {
|
||||
// Resume reading if buffer is below threshold
|
||||
session.fileStream?.resume();
|
||||
} else {
|
||||
// Wait for buffer to drain
|
||||
const checkBuffer = setInterval(() => {
|
||||
if (session.dataChannel?.bufferedAmount < session.chunkSize) {
|
||||
clearInterval(checkBuffer);
|
||||
session.fileStream?.resume();
|
||||
}
|
||||
}, 100);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Handle end of file
|
||||
session.fileStream.on('end', () => {
|
||||
if (session.dataChannel?.readyState === 'open') {
|
||||
session.dataChannel.send(JSON.stringify({ type: 'file-complete' }));
|
||||
console.log(`File transfer complete for client ${socketId}`);
|
||||
}
|
||||
});
|
||||
|
||||
// Handle file stream errors
|
||||
session.fileStream.on('error', (err) => {
|
||||
console.error(`File stream error for client ${socketId}:`, err);
|
||||
if (session.dataChannel?.readyState === 'open') {
|
||||
session.dataChannel.send(JSON.stringify({
|
||||
type: 'error',
|
||||
message: 'File read error on server'
|
||||
}));
|
||||
}
|
||||
cleanupSession(socketId);
|
||||
});
|
||||
const session = sessions.get(socketId);
|
||||
if (!session || !session.dataChannel) return;
|
||||
|
||||
// Send file info first
|
||||
session.dataChannel.send(JSON.stringify({
|
||||
type: 'file-info',
|
||||
name: path.basename(session.filePath),
|
||||
size: session.fileSize
|
||||
}));
|
||||
|
||||
// Open file stream
|
||||
session.fileStream = fs.createReadStream(session.filePath, {
|
||||
highWaterMark: session.chunkSize
|
||||
});
|
||||
|
||||
// Process file chunks
|
||||
session.fileStream.on('data', (chunk: Buffer) => {
|
||||
// Check if data channel is still open and ready
|
||||
if (session.dataChannel?.readyState === 'open') {
|
||||
// Pause the stream to handle backpressure
|
||||
session.fileStream?.pause();
|
||||
|
||||
// Send chunk as ArrayBuffer
|
||||
session.dataChannel.send(chunk);
|
||||
session.sentBytes += chunk.length;
|
||||
|
||||
// Report progress
|
||||
if (session.sentBytes % (5 * 1024 * 1024) === 0) { // Every 5MB
|
||||
console.log(`Sent ${session.sentBytes / (1024 * 1024)}MB of ${session.fileSize / (1024 * 1024)}MB`);
|
||||
}
|
||||
|
||||
// Check buffer status before resuming
|
||||
const bufferAmount = session.dataChannel.bufferedAmount;
|
||||
if (bufferAmount < session.chunkSize * 2) {
|
||||
// Resume reading if buffer is below threshold
|
||||
session.fileStream?.resume();
|
||||
} else {
|
||||
// Wait for buffer to drain
|
||||
const checkBuffer = setInterval(() => {
|
||||
if (session.dataChannel?.bufferedAmount < session.chunkSize) {
|
||||
clearInterval(checkBuffer);
|
||||
session.fileStream?.resume();
|
||||
}
|
||||
}, 100);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Handle end of file
|
||||
session.fileStream.on('end', () => {
|
||||
if (session.dataChannel?.readyState === 'open') {
|
||||
session.dataChannel.send(JSON.stringify({ type: 'file-complete' }));
|
||||
console.log(`File transfer complete for client ${socketId}`);
|
||||
}
|
||||
});
|
||||
|
||||
// Handle file stream errors
|
||||
session.fileStream.on('error', (err) => {
|
||||
console.error(`File stream error for client ${socketId}:`, err);
|
||||
if (session.dataChannel?.readyState === 'open') {
|
||||
session.dataChannel.send(JSON.stringify({
|
||||
type: 'error',
|
||||
message: 'File read error on server'
|
||||
}));
|
||||
}
|
||||
cleanupSession(socketId);
|
||||
});
|
||||
}
|
||||
|
||||
// Clean up session resources
|
||||
function cleanupSession(socketId: string): void {
|
||||
const session = sessions.get(socketId);
|
||||
if (!session) return;
|
||||
|
||||
if (session.fileStream) {
|
||||
session.fileStream.destroy();
|
||||
}
|
||||
|
||||
if (session.dataChannel && session.dataChannel.readyState === 'open') {
|
||||
session.dataChannel.close();
|
||||
}
|
||||
|
||||
session.peerConnection.close();
|
||||
sessions.delete(socketId);
|
||||
console.log(`Cleaned up session for client ${socketId}`);
|
||||
const session = sessions.get(socketId);
|
||||
if (!session) return;
|
||||
|
||||
if (session.fileStream) {
|
||||
session.fileStream.destroy();
|
||||
}
|
||||
|
||||
if (session.dataChannel && session.dataChannel.readyState === 'open') {
|
||||
session.dataChannel.close();
|
||||
}
|
||||
|
||||
session.peerConnection.close();
|
||||
sessions.delete(socketId);
|
||||
console.log(`Cleaned up session for client ${socketId}`);
|
||||
}
|
||||
|
||||
// Start the server
|
||||
server.listen(PORT, () => {
|
||||
console.log(`Server running on port ${PORT}`);
|
||||
|
||||
// Create files directory if it doesn't exist
|
||||
const filesDir = path.join(__dirname, 'files');
|
||||
if (!fs.existsSync(filesDir)) {
|
||||
fs.mkdirSync(filesDir);
|
||||
console.log('Created files directory');
|
||||
}
|
||||
*/
|
||||
app.listen(PORT, () => {
|
||||
log.info(`Server running on port ${PORT}`);
|
||||
});
|
||||
|
||||
*/
|
||||
//# sourceMappingURL=index.js.map
|
|
@ -1 +1 @@
|
|||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA6QE"}
|
||||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AACA,sDAA8B;AAC9B,+DAAiD;AAEjD,8DAAqC;AACrC,+BAA4B;AAG5B,+BAA+B;AAC/B,+BAAkC;AAElC,MAAM,IAAI,GAAY,IAAA,YAAM,EAAC,MAAM,EAAE,IAAI,CAAC,CAAC;AAC3C,MAAM,SAAS,GAAY,IAAA,WAAI,EAAC,SAAS,EAAE,UAAU,CAAC,CAAC;AACvD,MAAM,UAAU,GAAY,IAAA,WAAI,EAAC,SAAS,EAAE,WAAW,CAAC,CAAC;AAEzD,MAAM,GAAG,GAAa,IAAA,iBAAO,GAAE,CAAC;AAChC,MAAM,GAAG,GAAY,IAAA,eAAS,EAAC,QAAQ,CAAC,CAAC;AACzC,MAAM,GAAG,GAAmC,UAAU,CAAC,MAAM,CAAC;IAC1D,aAAa,EAAE,MAAM;IACrB,UAAU,EAAE,IAAA,WAAI,EAAC,SAAS,EAAE,WAAW,CAAC;IACxC,WAAW,EAAE,IAAA,WAAI,EAAC,SAAS,EAAE,WAAW,CAAC;CAC5C,CAAC,CAAC;AAEH,GAAG,CAAC,GAAG,CAAC,iBAAO,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC;AACpC,GAAG,CAAC,GAAG,CAAC,qBAAU,CAAC,UAAU,CAAC,EAAE,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;AACnD,GAAG,CAAC,MAAM,CAAC,YAAY,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;AACrC,GAAG,CAAC,GAAG,CAAC,aAAa,EAAE,YAAY,CAAC,CAAC;AACrC,GAAG,CAAC,GAAG,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;AAE5B,GAAG,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC,GAAa,EAAE,GAAc,EAAE,IAAmB,EAAE,EAAE;IAChE,MAAM,IAAI,GAAS,EAAE,CAAC;IACtB,OAAO,GAAG,CAAC,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;AACpC,CAAC,CAAC,CAAC;AAEH,GAAG,CAAC,IAAI,CAAC,SAAS,EAAE,CAAC,GAAa,EAAE,GAAc,EAAE,IAAmB,EAAE,EAAE;IACvE,MAAM,IAAI,GAAS;QACf,KAAK,EAAG;YACJ,KAAK,EAAE,SAAS;YAChB,OAAO,EAAG,+FAA+F;SAC5G;KACJ,CAAC;IACF,IAAI,OAAO,GAAG,CAAC,IAAI,KAAK,WAAW,EAAE,CAAC;QAClC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;IAC1B,CAAC;IACD,OAAO,GAAG,CAAC,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;AACpC,CAAC,CAAC,CAAA;AAEF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAgQE;AAEF,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,EAAE;IAClB,GAAG,CAAC,IAAI,CAAC,0BAA0B,IAAI,EAAE,CAAC,CAAC;AAC/C,CAAC,CAAC,CAAC"}
|
|
@ -1 +1 @@
|
|||
export {};
|
||||
import 'dotenv/config';
|
||||
|
|
|
@ -3,6 +3,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
require("dotenv/config");
|
||||
const upload_1 = require("../upload");
|
||||
const log_1 = require("../log");
|
||||
const chokidar_1 = __importDefault(require("chokidar"));
|
||||
|
@ -30,4 +31,43 @@ async function main() {
|
|||
});
|
||||
}
|
||||
main().catch(log.error);
|
||||
/*
|
||||
const minioResult = await uploadFileToS3('/path/to/your/file.pdf', {
|
||||
region: 'us-east-1', // Region can be any string for MinIO
|
||||
endpoint: 'https://minio.your-domain.com',
|
||||
bucketName: 'your-minio-bucket',
|
||||
credentials: {
|
||||
accessKeyId: 'your-minio-access-key',
|
||||
secretAccessKey: 'your-minio-secret-key'
|
||||
},
|
||||
forcePathStyle: true, // Important for most S3-compatible services
|
||||
expirationSeconds: 3600
|
||||
});
|
||||
|
||||
// Example 3: DigitalOcean Spaces
|
||||
const spacesResult = await uploadFileToS3('/path/to/your/file.pdf', {
|
||||
region: 'nyc3', // DigitalOcean datacenter region
|
||||
endpoint: 'https://nyc3.digitaloceanspaces.com',
|
||||
bucketName: 'your-space-name',
|
||||
credentials: {
|
||||
accessKeyId: 'your-spaces-key',
|
||||
secretAccessKey: 'your-spaces-secret'
|
||||
},
|
||||
forcePathStyle: true,
|
||||
expirationSeconds: 7200 // 2 hours
|
||||
});
|
||||
|
||||
// Example 4: Wasabi
|
||||
const wasabiResult = await uploadFileToS3('/path/to/your/file.pdf', {
|
||||
region: 'us-east-1',
|
||||
endpoint: 'https://s3.wasabisys.com',
|
||||
bucketName: 'your-wasabi-bucket',
|
||||
credentials: {
|
||||
accessKeyId: 'your-wasabi-access-key',
|
||||
secretAccessKey: 'your-wasabi-secret-key'
|
||||
},
|
||||
forcePathStyle: true,
|
||||
expirationSeconds: 86400 // 24 hours
|
||||
});
|
||||
*/
|
||||
//# sourceMappingURL=index.js.map
|
|
@ -1 +1 @@
|
|||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/monitor/index.ts"],"names":[],"mappings":";;;;;AAAA,sCAAmC;AACnC,gCAAmC;AAEnC,wDAAgC;AAIhC,MAAM,UAAU,GAAY,IAAI,CAAC,CAAC,QAAQ;AAE1C,MAAM,GAAG,GAAY,IAAA,eAAS,EAAC,OAAO,CAAC,CAAC;AAExC,KAAK,UAAU,aAAa,CAAE,QAAiB;IAC9C,MAAM,MAAM,GAAc;QACzB,MAAM,EAAE,WAAW;QACnB,UAAU,EAAE,kBAAkB;QAC9B,iBAAiB,EAAE,UAAU;KAC7B,CAAC;IAEF,GAAG,CAAC,IAAI,CAAC,mBAAmB,QAAQ,EAAE,CAAC,CAAC;IACxC,MAAM,MAAM,GAAkB,MAAM,IAAA,eAAM,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAC;IAErE,IAAI,MAAM,CAAC,OAAO,EAAE,CAAC;QACpB,GAAG,CAAC,IAAI,CAAC,yCAAyC,CAAC,CAAC;QACpD,GAAG,CAAC,IAAI,CAAC,cAAc,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC;IACtC,CAAC;SAAM,CAAC;QACP,GAAG,CAAC,KAAK,CAAC,gBAAgB,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;IAC3C,CAAC;AACF,CAAC;AAED,KAAK,UAAU,IAAI;IAClB,kBAAQ,CAAC,KAAK,CAAC,SAAS,EAAE,EAAE,OAAO,EAAE,cAAc,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,EAAE,CAAC,KAAW,EAAE,IAAa,EAAE,EAAE;QAC/F,GAAG,CAAC,IAAI,CAAC,QAAQ,IAAI,4BAA4B,KAAK,EAAE,CAAC,CAAC;IAC3D,CAAC,CAAC,CAAC;AACJ,CAAC;AAED,IAAI,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC"}
|
||||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/monitor/index.ts"],"names":[],"mappings":";;;;;AAAA,yBAAuB;AAEvB,sCAAmC;AACnC,gCAAmC;AAEnC,wDAAgC;AAIhC,MAAM,UAAU,GAAY,IAAI,CAAC,CAAC,QAAQ;AAE1C,MAAM,GAAG,GAAY,IAAA,eAAS,EAAC,OAAO,CAAC,CAAC;AAExC,KAAK,UAAU,aAAa,CAAE,QAAiB;IAC9C,MAAM,MAAM,GAAc;QACzB,MAAM,EAAE,WAAW;QACnB,UAAU,EAAE,kBAAkB;QAC9B,iBAAiB,EAAE,UAAU;KAC7B,CAAC;IAEF,GAAG,CAAC,IAAI,CAAC,mBAAmB,QAAQ,EAAE,CAAC,CAAC;IACxC,MAAM,MAAM,GAAkB,MAAM,IAAA,eAAM,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAC;IAErE,IAAI,MAAM,CAAC,OAAO,EAAE,CAAC;QACpB,GAAG,CAAC,IAAI,CAAC,yCAAyC,CAAC,CAAC;QACpD,GAAG,CAAC,IAAI,CAAC,cAAc,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC;IACtC,CAAC;SAAM,CAAC;QACP,GAAG,CAAC,KAAK,CAAC,gBAAgB,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;IAC3C,CAAC;AACF,CAAC;AAED,KAAK,UAAU,IAAI;IAClB,kBAAQ,CAAC,KAAK,CAAC,SAAS,EAAE,EAAE,OAAO,EAAE,cAAc,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,EAAE,CAAC,KAAW,EAAE,IAAa,EAAE,EAAE;QAC/F,GAAG,CAAC,IAAI,CAAC,QAAQ,IAAI,4BAA4B,KAAK,EAAE,CAAC,CAAC;IAC3D,CAAC,CAAC,CAAC;AACJ,CAAC;AAED,IAAI,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;AAExB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAsCE"}
|
|
@@ -0,0 +1 @@
export declare function shortid(): string;
@@ -0,0 +1,10 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.shortid = shortid;
function shortid() {
    let firstPart = (Math.random() * 46656) | 0;
    let secondPart = (Math.random() * 46656) | 0;
    return ("000" + firstPart.toString(36)).slice(-3) + ("000" + secondPart.toString(36)).slice(-3);
}
module.exports = { shortid };
//# sourceMappingURL=index.js.map
@ -0,0 +1 @@
|
|||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/shortid/index.ts"],"names":[],"mappings":";;AAAA,0BAIC;AAJD,SAAgB,OAAO;IACnB,IAAI,SAAS,GAAY,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC;IACrD,IAAI,UAAU,GAAY,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC;IACtD,OAAO,CAAC,KAAK,GAAG,SAAS,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,GAAG,UAAU,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;AACpG,CAAC;AAED,MAAM,CAAC,OAAO,GAAG,EAAE,OAAO,EAAE,CAAC"}
|
|
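Since the compiled output above is terse, a brief illustrative sketch of what shortid produces (not part of the commit): each call draws two random base-36 values below 46656 (36^3), so the result is a six-character lowercase alphanumeric id.

// Hypothetical usage of src/shortid: two 3-character base-36 halves, e.g. "a1z09k".
import { shortid } from './shortid';

const id: string = shortid();
console.log(id.length); // always 6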
@@ -0,0 +1 @@
export declare function size(filePath: string): Promise<number>;
@@ -0,0 +1,10 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.size = size;
const promises_1 = require("fs/promises");
async function size(filePath) {
    const stats = await (0, promises_1.stat)(filePath);
    return stats.size;
}
module.exports = { size };
//# sourceMappingURL=index.js.map
@ -0,0 +1 @@
|
|||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/size/index.ts"],"names":[],"mappings":";;AAGA,oBAGC;AAND,0CAAmC;AAG5B,KAAK,UAAU,IAAI,CAAE,QAAiB;IAC5C,MAAM,KAAK,GAAW,MAAM,IAAA,eAAI,EAAC,QAAQ,CAAC,CAAC;IAC3C,OAAO,KAAK,CAAC,IAAI,CAAC;AACnB,CAAC;AAED,MAAM,CAAC,OAAO,GAAG,EAAE,IAAI,EAAE,CAAC"}
|
|
@@ -0,0 +1 @@
export declare function tmp(ext?: string, prefix?: string, fileName?: string | null): string;
@@ -0,0 +1,17 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.tmp = tmp;
const os_1 = require("os");
const path_1 = require("path");
const crypto_1 = require("crypto");
const TMP = (0, os_1.tmpdir)();
function tmp(ext = '', prefix = '3d-tmp-', fileName = null) {
    let path = null;
    if (fileName === null) {
        fileName = prefix + (0, crypto_1.randomBytes)(32).toString('hex') + ext;
    }
    path = (0, path_1.join)(TMP, fileName);
    return path;
}
module.exports = { tmp };
//# sourceMappingURL=index.js.map
@ -0,0 +1 @@
|
|||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/tmp/index.ts"],"names":[],"mappings":";;AAMA,kBAOC;AAbD,2BAA4B;AAC5B,+BAA4B;AAC5B,mCAAqC;AAErC,MAAM,GAAG,GAAY,IAAA,WAAM,GAAE,CAAC;AAE9B,SAAgB,GAAG,CAAE,MAAe,EAAE,EAAE,SAAkB,SAAS,EAAE,WAA2B,IAAI;IACnG,IAAI,IAAI,GAAmB,IAAI,CAAC;IAChC,IAAI,QAAQ,KAAK,IAAI,EAAE,CAAC;QACvB,QAAQ,GAAG,MAAM,GAAG,IAAA,oBAAW,EAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,GAAG,CAAC;IAC3D,CAAC;IACD,IAAI,GAAG,IAAA,WAAI,EAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;IAC3B,OAAO,IAAI,CAAC;AACb,CAAC;AAED,MAAM,CAAC,OAAO,GAAG,EAAE,GAAG,EAAE,CAAC"}
|
|
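For context, a minimal sketch of the tmp helper's behaviour; not part of the commit, and the extension shown is an assumption taken from the gcode hashing code elsewhere in this diff.

// Hypothetical usage of src/tmp: builds a path under os.tmpdir() with a random name.
import { tmp } from './tmp';

const scratch: string = tmp('.gcode');            // e.g. /tmp/3d-tmp-<64 hex chars>.gcode
const named: string = tmp('', '', 'fixed-name');  // an explicit fileName skips the random part
console.log(scratch, named);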
@ -2,6 +2,12 @@ interface S3Config {
|
|||
region: string;
|
||||
bucketName: string;
|
||||
expirationSeconds: number;
|
||||
endpoint?: string;
|
||||
credentials?: {
|
||||
accessKeyId: string;
|
||||
secretAccessKey: string;
|
||||
};
|
||||
forcePathStyle?: boolean;
|
||||
}
|
||||
interface UploadResult {
|
||||
success: boolean;
|
||||
|
|
|
@ -33,9 +33,19 @@ async function upload(id, filePath, config) {
|
|||
error: `Error reading file: ${error.message}`
|
||||
};
|
||||
}
|
||||
const s3Client = new client_s3_1.S3Client({
|
||||
const s3ClientOptions = {
|
||||
region: config.region
|
||||
});
|
||||
};
|
||||
if (config.endpoint) {
|
||||
s3ClientOptions.endpoint = config.endpoint;
|
||||
}
|
||||
if (config.credentials) {
|
||||
s3ClientOptions.credentials = config.credentials;
|
||||
}
|
||||
if (config.forcePathStyle !== undefined) {
|
||||
s3ClientOptions.forcePathStyle = config.forcePathStyle;
|
||||
}
|
||||
const s3Client = new client_s3_1.S3Client(s3ClientOptions);
|
||||
try {
|
||||
const uploadCommand = new client_s3_1.PutObjectCommand({
|
||||
Bucket: config.bucketName,
|
||||
|
|
|
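The hunk above switches from a hard-coded S3Client constructor call to a separately built options object so that S3-compatible services can be targeted. A hedged sketch of the resulting configuration shape (the endpoint and credential values are placeholders, mirroring the commented examples elsewhere in this commit):

// Hypothetical S3Client configuration for an S3-compatible endpoint (values are placeholders).
import { S3Client } from '@aws-sdk/client-s3';

const s3Client = new S3Client({
    region: 'us-east-1',
    endpoint: 'https://minio.your-domain.com',   // omit for AWS S3 itself
    forcePathStyle: true,                        // needed by most S3-compatible services
    credentials: {
        accessKeyId: 'your-minio-access-key',
        secretAccessKey: 'your-minio-secret-key'
    }
});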
@ -1 +1 @@
|
|||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/upload/index.ts"],"names":[],"mappings":";;AA0BA,wBA2DC;AArFD,kDAAkF;AAClF,wEAA6D;AAC7D,2BAAkD;AAClD,+BAAkD;AAiBlD;;;;;GAKG;AACI,KAAK,UAAU,MAAM,CAAE,EAAU,EAAE,QAAgB,EAAE,MAAgB;IAC3E,MAAM,QAAQ,GAAY,IAAA,cAAO,EAAC,QAAQ,CAAC,CAAC;IAC5C,MAAM,QAAQ,GAAY,IAAA,eAAQ,EAAC,QAAQ,CAAC,CAAC;IAC7C,MAAM,OAAO,GAAa,IAAA,cAAO,EAAC,QAAQ,CAAC,CAAC;IAC5C,MAAM,GAAG,GAAW,GAAG,EAAE,GAAG,OAAO,EAAE,CAAC;IAEtC,IAAI,CAAC,GAAG,EAAE,CAAC;QACV,OAAO;YACN,OAAO,EAAE,KAAK;YACd,KAAK,EAAE,sBAAsB;SAC7B,CAAC;IACH,CAAC;IAED,IAAI,UAAuB,CAAC;IAE5B,IAAI,CAAC;QACJ,UAAU,GAAG,IAAA,qBAAgB,EAAC,QAAQ,CAAC,CAAC;IACzC,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACd,MAAM,KAAK,GAAU,GAAY,CAAC;QAClC,OAAO;YACN,OAAO,EAAE,KAAK;YACd,KAAK,EAAE,uBAAuB,KAAK,CAAC,OAAO,EAAE;SAC7C,CAAC;IACH,CAAC;IAED,MAAM,QAAQ,GAAa,IAAI,oBAAQ,CAAC;QACvC,MAAM,EAAE,MAAM,CAAC,MAAM;KACrB,CAAC,CAAC;IAEH,IAAI,CAAC;QACJ,MAAM,aAAa,GAAqB,IAAI,4BAAgB,CAAC;YAC5D,MAAM,EAAE,MAAM,CAAC,UAAU;YACzB,GAAG,EAAE,GAAG;YACR,IAAI,EAAE,UAAU;SAChB,CAAC,CAAC;QAEH,MAAM,QAAQ,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;QAEnC,MAAM,UAAU,GAAqB,IAAI,4BAAgB,CAAC;YACzD,MAAM,EAAE,MAAM,CAAC,UAAU;YACzB,GAAG,EAAE,GAAG;SACR,CAAC,CAAC;QAEH,MAAM,GAAG,GAAW,MAAM,IAAA,mCAAY,EAAC,QAAQ,EAAE,UAAU,EAAE;YAC5D,SAAS,EAAE,MAAM,CAAC,iBAAiB;SACnC,CAAC,CAAC;QAEH,OAAO;YACN,OAAO,EAAE,IAAI;YACb,GAAG;YACH,GAAG;SACH,CAAC;IACH,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACd,MAAM,KAAK,GAAW,GAAY,CAAC;QACnC,OAAO;YACN,OAAO,EAAE,KAAK;YACd,KAAK,EAAE,0BAA0B,KAAK,CAAC,OAAO,EAAE;SAChD,CAAC;IACH,CAAC;AACF,CAAC;AAED,MAAM,CAAC,OAAO,GAAG,EAAE,MAAM,EAAE,CAAC"}
|
||||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/upload/index.ts"],"names":[],"mappings":";;AA+BA,wBAyEC;AAxGD,kDAAkF;AAClF,wEAA6D;AAC7D,2BAAkD;AAClD,+BAAkD;AAsBlD;;;;;GAKG;AACI,KAAK,UAAU,MAAM,CAAE,EAAU,EAAE,QAAgB,EAAE,MAAgB;IACxE,MAAM,QAAQ,GAAY,IAAA,cAAO,EAAC,QAAQ,CAAC,CAAC;IAC5C,MAAM,QAAQ,GAAY,IAAA,eAAQ,EAAC,QAAQ,CAAC,CAAC;IAC7C,MAAM,OAAO,GAAa,IAAA,cAAO,EAAC,QAAQ,CAAC,CAAC;IAC5C,MAAM,GAAG,GAAW,GAAG,EAAE,GAAG,OAAO,EAAE,CAAC;IAEtC,IAAI,CAAC,GAAG,EAAE,CAAC;QACP,OAAO;YACH,OAAO,EAAE,KAAK;YACd,KAAK,EAAE,sBAAsB;SAChC,CAAC;IACN,CAAC;IAED,IAAI,UAAuB,CAAC;IAE5B,IAAI,CAAC;QACD,UAAU,GAAG,IAAA,qBAAgB,EAAC,QAAQ,CAAC,CAAC;IAC5C,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACX,MAAM,KAAK,GAAU,GAAY,CAAC;QAClC,OAAO;YACH,OAAO,EAAE,KAAK;YACd,KAAK,EAAE,uBAAuB,KAAK,CAAC,OAAO,EAAE;SAChD,CAAC;IACN,CAAC;IAED,MAAM,eAAe,GAAwB;QACzC,MAAM,EAAE,MAAM,CAAC,MAAM;KACxB,CAAC;IAEF,IAAI,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,eAAe,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,CAAC;IAC/C,CAAC;IAED,IAAI,MAAM,CAAC,WAAW,EAAE,CAAC;QACrB,eAAe,CAAC,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;IACrD,CAAC;IAED,IAAI,MAAM,CAAC,cAAc,KAAK,SAAS,EAAE,CAAC;QACtC,eAAe,CAAC,cAAc,GAAG,MAAM,CAAC,cAAc,CAAC;IAC3D,CAAC;IAED,MAAM,QAAQ,GAAa,IAAI,oBAAQ,CAAC,eAAe,CAAC,CAAC;IAEzD,IAAI,CAAC;QACD,MAAM,aAAa,GAAqB,IAAI,4BAAgB,CAAC;YACzD,MAAM,EAAE,MAAM,CAAC,UAAU;YACzB,GAAG,EAAE,GAAG;YACR,IAAI,EAAE,UAAU;SACnB,CAAC,CAAC;QAEH,MAAM,QAAQ,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;QAEnC,MAAM,UAAU,GAAqB,IAAI,4BAAgB,CAAC;YACtD,MAAM,EAAE,MAAM,CAAC,UAAU;YACzB,GAAG,EAAE,GAAG;SACX,CAAC,CAAC;QAEH,MAAM,GAAG,GAAY,MAAM,IAAA,mCAAY,EAAC,QAAQ,EAAE,UAAU,EAAE;YAC1D,SAAS,EAAE,MAAM,CAAC,iBAAiB;SACtC,CAAC,CAAC;QAEH,OAAO;YACH,OAAO,EAAE,IAAI;YACb,GAAG;YACH,GAAG;SACN,CAAC;IACN,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACX,MAAM,KAAK,GAAW,GAAY,CAAC;QACnC,OAAO;YACH,OAAO,EAAE,KAAK;YACd,KAAK,EAAE,0BAA0B,KAAK,CAAC,OAAO,EAAE;SACnD,CAAC;IACN,CAAC;AACL,CAAC;AAED,MAAM,CAAC,OAAO,GAAG,EAAE,MAAM,EAAE,CAAC"}
|
File diff suppressed because it is too large
@@ -21,13 +21,19 @@
    "@aws-sdk/client-s3": "^3.777.0",
    "@aws-sdk/s3-request-presigner": "^3.777.0",
    "bcrypt": "^5.1.1",
    "body-parser": "^2.2.0",
    "chokidar": "^4.0.3",
    "dotenv": "^16.4.7",
    "express": "^4.18.2",
    "express-handlebars": "^8.0.1",
    "lodash": "^4.17.21",
    "moment": "^2.30.1",
    "socket.io": "^4.7.2",
    "socket.io-client": "^4.7.2",
    "sqlite3": "^5.1.7",
    "stripe": "^17.7.0",
    "triple-beam": "^1.4.1",
    "uuid": "^11.1.0",
    "winston": "^3.17.0"
  },
  "devDependencies": {
@@ -0,0 +1,10 @@
#!/bin/bash

source .env

if [[ ! -f ${DATABASE_FILE} ]]; then
    echo "Please define DATABASE_FILE in the .env file"
    exit 1
fi

cat sql/setup.sql | sqlite3 ${DATABASE_FILE}
@ -1,24 +1,43 @@
|
|||
CREATE TABLE IF NOT EXISTS files {
|
||||
CREATE TABLE IF NOT EXISTS files (
|
||||
id TEXT PRIMARY KEY,
|
||||
filename TEXT,
|
||||
original TEXT,
|
||||
filename TEXT NOT NULL,
|
||||
original TEXT NOT NULL,
|
||||
archive TEXT,
|
||||
paths TEXT,
|
||||
hash TEXT,
|
||||
size INTEGER,
|
||||
created INTEGER,
|
||||
created_str TEXT
|
||||
};
|
||||
hash TEXT NOT NULL,
|
||||
size INTEGER NOT NULL,
|
||||
created INTEGER NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS access {
|
||||
CREATE TABLE IF NOT EXISTS access (
|
||||
id TEXT PRIMARY KEY,
|
||||
slug TEXT UNIQUE NOT NULL,
|
||||
file TEXT NOT NULL,
|
||||
user TEXT,
|
||||
created INTEGER NOT NULL,
|
||||
expires INTEGER NOT NULL,
|
||||
paid INTEGER DEFAULT 0,
|
||||
url TEXT UNIQUE NOT NULL,
|
||||
service TEXT,
|
||||
downloads INTEGER DEFAULT 0,
|
||||
FOREIGN KEY(file) REFERENCES files(id),
|
||||
FOREIGN KEY(user) REFERENCES users(id)
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS metadata (
|
||||
id TEXT PRIMARY KEY,
|
||||
file TEXT REFERENCES files(id),
|
||||
created INTEGER,
|
||||
created_str TEXT,
|
||||
expires INTEGER,
|
||||
expires_str TEXT,
|
||||
paid INTEGER DEFAULT 0,
|
||||
url TEXT UNIQUE,
|
||||
service TEXT,
|
||||
downloads INTEGER DEFAULT 0
|
||||
}
|
||||
type TEXT,
|
||||
width INTEGER,
|
||||
height INTEGER,
|
||||
length REAL,
|
||||
format TEXT,
|
||||
preview TEXT
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS users (
|
||||
id TEXT PRIMARY KEY,
|
||||
email TEXT UNIQUE,
|
||||
name TEXT,
|
||||
created INTEGER
|
||||
);
|
|
@ -0,0 +1,73 @@
|
|||
import { stat } from 'fs/promises';
|
||||
import type { Stats } from 'fs';
|
||||
import { basename, resolve } from 'path';
|
||||
|
||||
import { size } from '../size';
|
||||
import { Hashes } from '../hash';
|
||||
|
||||
interface FileInfo {
|
||||
success : boolean;
|
||||
filename? : string;
|
||||
original? : string;
|
||||
hash? : string;
|
||||
size? : number;
|
||||
created? : number;
|
||||
error? : Error | undefined;
|
||||
}
|
||||
|
||||
export class File {
|
||||
public static async info (filePath : string) : Promise<FileInfo> {
|
||||
const original : string = resolve(filePath);
|
||||
const filename : string = basename(filePath);
|
||||
let success : boolean = false;
|
||||
let hash : string;
|
||||
let fileSize : number;
|
||||
let created : number;
|
||||
let stats : Stats;
|
||||
let error : Error;
|
||||
|
||||
try {
|
||||
hash = await Hashes.file(filePath);
|
||||
} catch (err) {
|
||||
error = err as Error;
|
||||
return {
|
||||
success,
|
||||
error
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
fileSize = await size(filePath)
|
||||
} catch (err) {
|
||||
error = err as Error;
|
||||
return {
|
||||
success,
|
||||
error
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
stats = await stat(filePath);
|
||||
created = stats.birthtimeMs;
|
||||
} catch (err) {
|
||||
error = err as Error;
|
||||
return {
|
||||
success,
|
||||
error
|
||||
};
|
||||
}
|
||||
|
||||
success = true;
|
||||
return {
|
||||
success,
|
||||
filename,
|
||||
original,
|
||||
hash,
|
||||
size : fileSize,
|
||||
created
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { File };
|
||||
export type { FileInfo };
|
|
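To show how the File.info API added above is meant to be called, a short hedged sketch follows (the path is a placeholder; the error handling simply follows the FileInfo shape the method returns):

// Hypothetical usage of File.info from src/file (the path is a placeholder).
import { File } from './file';

async function describe(): Promise<void> {
    const info = await File.info('data/example.bin');
    if (!info.success) {
        console.error('Could not read file:', info.error);
        return;
    }
    console.log(info.filename, info.hash, info.size, info.created);
}

describe().catch(console.error);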
@ -1,8 +1,5 @@
|
|||
import { createHash, Hash } from 'crypto';
|
||||
import { createReadStream } from 'fs';
|
||||
import { unlink } from 'fs/promises';
|
||||
import { exec } from 'child_process';
|
||||
import { tmp } from '../tmp';
|
||||
|
||||
export class Hashes {
|
||||
public static async file (path : string) : Promise<string> {
|
||||
|
@ -19,41 +16,6 @@ export class Hashes {
|
|||
const sha : Hash = createHash('sha256').update(str);
|
||||
return sha.digest('hex');
|
||||
}
|
||||
|
||||
public static async gcode (gcodePath : string) : Promise<string> {
|
||||
const dest : string = tmp('.gcode', '3d-gcode-hash-');
|
||||
const cmd : string = `cat "${gcodePath}" | grep -v '; generated by PrusaSlicer' > "${dest}"`
|
||||
let hash : string;
|
||||
|
||||
try {
|
||||
await Hashes.exec(cmd);
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
|
||||
try {
|
||||
hash = await Hashes.file(dest);
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
|
||||
try {
|
||||
await unlink(dest);
|
||||
} catch (err) {
|
||||
throw err;
|
||||
}
|
||||
|
||||
return hash;
|
||||
}
|
||||
|
||||
private static async exec (cmd : string) {
|
||||
return new Promise((resolve : Function, reject : Function) => {
|
||||
return exec(cmd, (err : Error, stdout : string | Buffer, stderr : string | Buffer) => {
|
||||
if (err) return reject(err);
|
||||
return resolve(true);
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { Hashes };
|
522 src/index.ts
@ -1,13 +1,89 @@
|
|||
|
||||
import express from 'express';
|
||||
import { envInt } from './env';
|
||||
import * as handlebars from 'express-handlebars';
|
||||
import type { Express, Request, Response, NextFunction } from 'express';
|
||||
import bodyParser from 'body-parser';
|
||||
import { join } from 'path';
|
||||
import type { Logger } from 'winston';
|
||||
|
||||
const app = express();
|
||||
const PORT = process.env.PORT || 3000;
|
||||
import { envInt } from './env';
|
||||
import { createLog } from './log';
|
||||
|
||||
// Serve static files
|
||||
app.use(express.static(join(__dirname, 'public')));
|
||||
const PORT : number = envInt('PORT', 3835);
|
||||
const viewsPath : string = join(__dirname, '../views');
|
||||
const staticPath : string = join(__dirname, '../static');
|
||||
|
||||
const app : Express = express();
|
||||
const log : Logger = createLog('icebox');
|
||||
const hbs : handlebars.ExpressHandlebars = handlebars.create({
|
||||
defaultLayout: 'main',
|
||||
layoutsDir: join(viewsPath, './layouts'),
|
||||
partialsDir: join(viewsPath, '/partials')
|
||||
});
|
||||
|
||||
app.use(express.static(staticPath));
|
||||
app.use(bodyParser.urlencoded({ extended: true }));
|
||||
app.engine('handlebars', hbs.engine);
|
||||
app.set('view engine', 'handlebars');
|
||||
app.set('views', viewsPath);
|
||||
|
||||
app.get('/', (req : Request, res : Response, next : NextFunction) => {
|
||||
const data : any = {};
|
||||
return res.render('home', data);
|
||||
});
|
||||
|
||||
app.post('/lookup', async (req : Request, res : Response, next : NextFunction) => {
|
||||
const data : any = {
|
||||
alert : {
|
||||
class: 'primary',
|
||||
message : 'Check your email. If one or more active download links are found they will be emailed to you.'
|
||||
}
|
||||
};
|
||||
if (typeof req.body !== 'undefined') {
|
||||
if (typeof req.body['first-name'] !== 'undefined' && req.body['first-name'] !== '') {
|
||||
//bot trap
|
||||
log.warn(`Potential bot trapped: ${JSON.stringify(req.body)}`);
|
||||
} else if (typeof req.body.email !== 'undefined' && req.body.email.trim() !== '') {
|
||||
|
||||
}
|
||||
}
|
||||
return res.render('home', data);
|
||||
});
|
||||
|
||||
app.get('/file/:id', async (req : Request, res : Response, next : NextFunction) => {
|
||||
const data : any = {};
|
||||
|
||||
data.file = {
|
||||
filename : 'Test.mov'
|
||||
};
|
||||
|
||||
if ( false ) {
|
||||
data.alert = {
|
||||
class : 'warning',
|
||||
message : `This download link expired `
|
||||
}
|
||||
}
|
||||
|
||||
if ( false ) {
|
||||
data.alert = {
|
||||
class : 'danger',
|
||||
message : `No download link was found with this ID`
|
||||
};
|
||||
return res.render('home', data);
|
||||
}
|
||||
|
||||
return res.render('file', data);
|
||||
});
|
||||
|
||||
app.get('/example.txt', (req : Request, res : Response, next : NextFunction) => {
|
||||
return res.send('Test');
|
||||
});
|
||||
|
||||
app.post('/file/:id/download', async (req : Request, res : Response, next : NextFunction) => {
|
||||
const data : any = {};
|
||||
const url : string = `http://localhost:${PORT}/example.txt`;
|
||||
return res.redirect(307, url);
|
||||
});
|
||||
|
||||
/*
|
||||
import http from 'http';
|
||||
|
@ -18,13 +94,13 @@ import { RTCPeerConnection, RTCSessionDescription, RTCIceCandidate } from 'wrtc'
|
|||
|
||||
// Define types
|
||||
interface FileTransferSession {
|
||||
peerConnection: RTCPeerConnection;
|
||||
dataChannel?: RTCDataChannel;
|
||||
fileStream?: fs.ReadStream;
|
||||
filePath: string;
|
||||
fileSize: number;
|
||||
chunkSize: number;
|
||||
sentBytes: number;
|
||||
peerConnection: RTCPeerConnection;
|
||||
dataChannel?: RTCDataChannel;
|
||||
fileStream?: fs.ReadStream;
|
||||
filePath: string;
|
||||
fileSize: number;
|
||||
chunkSize: number;
|
||||
sentBytes: number;
|
||||
}
|
||||
|
||||
// Initialize express app
|
||||
|
@ -41,240 +117,232 @@ const sessions: Map<string, FileTransferSession> = new Map();
|
|||
|
||||
// Configure WebRTC ICE servers (STUN/TURN)
|
||||
const iceServers = [
|
||||
{ urls: 'stun:stun.l.google.com:19302' },
|
||||
{ urls: 'stun:stun1.l.google.com:19302' }
|
||||
{ urls: 'stun:stun.l.google.com:19302' },
|
||||
{ urls: 'stun:stun1.l.google.com:19302' }
|
||||
];
|
||||
|
||||
io.on('connection', (socket) => {
|
||||
console.log('Client connected:', socket.id);
|
||||
console.log('Client connected:', socket.id);
|
||||
|
||||
// Handle request for available files
|
||||
socket.on('get-files', () => {
|
||||
const filesDirectory = path.join(__dirname, 'files');
|
||||
try {
|
||||
const files = fs.readdirSync(filesDirectory)
|
||||
.filter(file => fs.statSync(path.join(filesDirectory, file)).isFile())
|
||||
.map(file => {
|
||||
const filePath = path.join(filesDirectory, file);
|
||||
const stats = fs.statSync(filePath);
|
||||
return {
|
||||
name: file,
|
||||
size: stats.size,
|
||||
modified: stats.mtime
|
||||
};
|
||||
// Handle request for available files
|
||||
socket.on('get-files', () => {
|
||||
const filesDirectory = path.join(__dirname, 'files');
|
||||
try {
|
||||
const files = fs.readdirSync(filesDirectory)
|
||||
.filter(file => fs.statSync(path.join(filesDirectory, file)).isFile())
|
||||
.map(file => {
|
||||
const filePath = path.join(filesDirectory, file);
|
||||
const stats = fs.statSync(filePath);
|
||||
return {
|
||||
name: file,
|
||||
size: stats.size,
|
||||
modified: stats.mtime
|
||||
};
|
||||
});
|
||||
socket.emit('files-list', files);
|
||||
} catch (err) {
|
||||
console.error('Error reading files directory:', err);
|
||||
socket.emit('error', 'Failed to retrieve files list');
|
||||
}
|
||||
});
|
||||
|
||||
// Handle file transfer request
|
||||
socket.on('request-file', (fileName: string) => {
|
||||
const filePath = path.join(__dirname, 'files', fileName);
|
||||
|
||||
// Check if file exists
|
||||
if (!fs.existsSync(filePath)) {
|
||||
return socket.emit('error', 'File not found');
|
||||
}
|
||||
|
||||
const fileSize = fs.statSync(filePath).size;
|
||||
const chunkSize = 16384; // 16KB chunks
|
||||
|
||||
// Create and configure peer connection
|
||||
const peerConnection = new RTCPeerConnection({ iceServers });
|
||||
|
||||
// Create data channel
|
||||
const dataChannel = peerConnection.createDataChannel('fileTransfer', {
|
||||
ordered: true
|
||||
});
|
||||
socket.emit('files-list', files);
|
||||
} catch (err) {
|
||||
console.error('Error reading files directory:', err);
|
||||
socket.emit('error', 'Failed to retrieve files list');
|
||||
}
|
||||
});
|
||||
|
||||
// Handle file transfer request
|
||||
socket.on('request-file', (fileName: string) => {
|
||||
const filePath = path.join(__dirname, 'files', fileName);
|
||||
|
||||
// Check if file exists
|
||||
if (!fs.existsSync(filePath)) {
|
||||
return socket.emit('error', 'File not found');
|
||||
}
|
||||
|
||||
const fileSize = fs.statSync(filePath).size;
|
||||
const chunkSize = 16384; // 16KB chunks
|
||||
|
||||
// Create and configure peer connection
|
||||
const peerConnection = new RTCPeerConnection({ iceServers });
|
||||
|
||||
// Create data channel
|
||||
const dataChannel = peerConnection.createDataChannel('fileTransfer', {
|
||||
ordered: true
|
||||
|
||||
// Store session info
|
||||
sessions.set(socket.id, {
|
||||
peerConnection,
|
||||
dataChannel,
|
||||
filePath,
|
||||
fileSize,
|
||||
chunkSize,
|
||||
sentBytes: 0
|
||||
});
|
||||
|
||||
// Handle ICE candidates
|
||||
peerConnection.onicecandidate = (event) => {
|
||||
if (event.candidate) {
|
||||
socket.emit('ice-candidate', event.candidate);
|
||||
}
|
||||
};
|
||||
|
||||
// Set up data channel handlers
|
||||
dataChannel.onopen = () => {
|
||||
console.log(`Data channel opened for client ${socket.id}`);
|
||||
startFileTransfer(socket.id);
|
||||
};
|
||||
|
||||
dataChannel.onclose = () => {
|
||||
console.log(`Data channel closed for client ${socket.id}`);
|
||||
cleanupSession(socket.id);
|
||||
};
|
||||
|
||||
dataChannel.onerror = (error) => {
|
||||
console.error(`Data channel error for client ${socket.id}:`, error);
|
||||
cleanupSession(socket.id);
|
||||
};
|
||||
|
||||
// Create offer
|
||||
peerConnection.createOffer()
|
||||
.then(offer => peerConnection.setLocalDescription(offer))
|
||||
.then(() => {
|
||||
socket.emit('offer', {
|
||||
sdp: peerConnection.localDescription,
|
||||
fileInfo: {
|
||||
name: path.basename(filePath),
|
||||
size: fileSize
|
||||
}
|
||||
});
|
||||
})
|
||||
.catch(err => {
|
||||
console.error('Error creating offer:', err);
|
||||
socket.emit('error', 'Failed to create connection offer');
|
||||
cleanupSession(socket.id);
|
||||
});
|
||||
});
|
||||
|
||||
// Store session info
|
||||
sessions.set(socket.id, {
|
||||
peerConnection,
|
||||
dataChannel,
|
||||
filePath,
|
||||
fileSize,
|
||||
chunkSize,
|
||||
sentBytes: 0
|
||||
// Handle answer from browser
|
||||
socket.on('answer', async (answer: RTCSessionDescription) => {
|
||||
try {
|
||||
const session = sessions.get(socket.id);
|
||||
if (!session) return;
|
||||
|
||||
await session.peerConnection.setRemoteDescription(new RTCSessionDescription(answer));
|
||||
console.log(`Connection established with client ${socket.id}`);
|
||||
} catch (err) {
|
||||
console.error('Error setting remote description:', err);
|
||||
socket.emit('error', 'Failed to establish connection');
|
||||
cleanupSession(socket.id);
|
||||
}
|
||||
});
|
||||
|
||||
// Handle ICE candidates
|
||||
peerConnection.onicecandidate = (event) => {
|
||||
if (event.candidate) {
|
||||
socket.emit('ice-candidate', event.candidate);
|
||||
}
|
||||
};
|
||||
// Handle ICE candidates from browser
|
||||
socket.on('ice-candidate', async (candidate: RTCIceCandidate) => {
|
||||
try {
|
||||
const session = sessions.get(socket.id);
|
||||
if (!session) return;
|
||||
|
||||
await session.peerConnection.addIceCandidate(new RTCIceCandidate(candidate));
|
||||
} catch (err) {
|
||||
console.error('Error adding ICE candidate:', err);
|
||||
}
|
||||
});

  // Handle client disconnection
  socket.on('disconnect', () => {
    console.log('Client disconnected:', socket.id);
    cleanupSession(socket.id);
  });
});
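
// Rough sketch of the browser side this signalling assumes (an assumption,
// not part of this commit): the client answers the offer and relays its ICE
// candidates back over the same socket events; handleChunk is a placeholder.
//
// socket.on('offer', async ({ sdp }) => {
//   const pc = new RTCPeerConnection();
//   pc.ondatachannel = (e) => { e.channel.onmessage = handleChunk; };
//   pc.onicecandidate = (e) => { if (e.candidate) socket.emit('ice-candidate', e.candidate); };
//   await pc.setRemoteDescription(sdp);
//   const answer = await pc.createAnswer();
//   await pc.setLocalDescription(answer);
//   socket.emit('answer', answer);
// });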

// Start file transfer
function startFileTransfer(socketId: string): void {
  const session = sessions.get(socketId);
  if (!session || !session.dataChannel) return;

  // Send file info first
  session.dataChannel.send(JSON.stringify({
    type: 'file-info',
    name: path.basename(session.filePath),
    size: session.fileSize
  }));

  // Open file stream
  session.fileStream = fs.createReadStream(session.filePath, {
    highWaterMark: session.chunkSize
  });

  // Process file chunks
  session.fileStream.on('data', (chunk: Buffer) => {
    // Check if data channel is still open and ready
    if (session.dataChannel?.readyState === 'open') {
      // Pause the stream to handle backpressure
      session.fileStream?.pause();

      // Send chunk as ArrayBuffer
      session.dataChannel.send(chunk);
      session.sentBytes += chunk.length;

      // Report progress
      if (session.sentBytes % (5 * 1024 * 1024) === 0) { // Every 5MB
        console.log(`Sent ${session.sentBytes / (1024 * 1024)}MB of ${session.fileSize / (1024 * 1024)}MB`);
      }

      // Check buffer status before resuming
      const bufferAmount = session.dataChannel.bufferedAmount;
      if (bufferAmount < session.chunkSize * 2) {
        // Resume reading if buffer is below threshold
        session.fileStream?.resume();
      } else {
        // Wait for buffer to drain
        const checkBuffer = setInterval(() => {
          if (session.dataChannel?.bufferedAmount < session.chunkSize) {
            clearInterval(checkBuffer);
            session.fileStream?.resume();
          }
        }, 100);
      }
    }
  });

  // Handle end of file
  session.fileStream.on('end', () => {
    if (session.dataChannel?.readyState === 'open') {
      session.dataChannel.send(JSON.stringify({ type: 'file-complete' }));
      console.log(`File transfer complete for client ${socketId}`);
    }
  });

  // Handle file stream errors
  session.fileStream.on('error', (err) => {
    console.error(`File stream error for client ${socketId}:`, err);
    if (session.dataChannel?.readyState === 'open') {
      session.dataChannel.send(JSON.stringify({
        type: 'error',
        message: 'File read error on server'
      }));
    }
    cleanupSession(socketId);
  });
}
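
// Design note (a sketch, not part of this commit): instead of polling
// bufferedAmount on a 100ms interval, RTCDataChannel exposes a
// 'bufferedamountlow' event that fires once the send buffer drains below
// bufferedAmountLowThreshold, e.g.:
//
// dataChannel.bufferedAmountLowThreshold = chunkSize;
// dataChannel.onbufferedamountlow = () => fileStream.resume();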

// Clean up session resources
function cleanupSession(socketId: string): void {
  const session = sessions.get(socketId);
  if (!session) return;

  if (session.fileStream) {
    session.fileStream.destroy();
  }

  if (session.dataChannel && session.dataChannel.readyState === 'open') {
    session.dataChannel.close();
  }

  session.peerConnection.close();
  sessions.delete(socketId);
  console.log(`Cleaned up session for client ${socketId}`);
}

// Start the server
server.listen(PORT, () => {
  console.log(`Server running on port ${PORT}`);

  // Create files directory if it doesn't exist
  const filesDir = path.join(__dirname, 'files');
  if (!fs.existsSync(filesDir)) {
    fs.mkdirSync(filesDir);
    console.log('Created files directory');
  }
});
*/

app.listen(PORT, () => {
  log.info(`Server running on port ${PORT}`);
});

@@ -2,22 +2,31 @@ import 'dotenv/config';
import { upload } from '../upload';
import { createLog } from '../log';
import { File } from '../file';

import chokidar from 'chokidar';
import type { Logger } from 'winston';
import type { UploadResult, S3Config } from '../upload';
import type { FileInfo } from '../file';

const EXPIRATION : number = 3600; // 1 hour

const log : Logger = createLog('files');

async function processUpload (filePath : string) {
  const config : S3Config = {
    region: 'us-east-1',
    bucketName: 'your-bucket-name',
    expirationSeconds: EXPIRATION
  };

  const fileInfo : FileInfo = await File.info(filePath);

  if (!fileInfo.success) {
    log.error(`Error processing file info`, fileInfo.error);
    return;
  }

  log.info(`Started upload: ${filePath}`);
  const result : UploadResult = await upload('test', filePath, config);

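// Sketch of how the watcher presumably wires into processUpload (the chokidar
// setup itself is outside this hunk, and the 'watch/' path is an assumption):
// chokidar's watch(path).on('add', cb) fires once per new file.
//
// chokidar.watch('watch/', { ignoreInitial: true, awaitWriteFinish: true })
//   .on('add', (filePath: string) => processUpload(filePath));
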
@@ -0,0 +1,7 @@
export function shortid () : string {
  let firstPart : number = (Math.random() * 46656) | 0;
  let secondPart : number = (Math.random() * 46656) | 0;
  return ("000" + firstPart.toString(36)).slice(-3) + ("000" + secondPart.toString(36)).slice(-3);
}

module.exports = { shortid };

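// Usage sketch (illustration, not in this commit): each call concatenates two
// zero-padded 3-character base-36 strings, so it yields a 6-character
// lowercase id such as "0k3hz9".
//
// const id: string = shortid();
// console.log(id.length); // 6
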
@@ -0,0 +1,16 @@
import { tmpdir } from 'os';
import { join } from 'path';
import { randomBytes } from 'crypto';

const TMP : string = tmpdir();

export function tmp (ext : string = '', prefix : string = '3d-tmp-', fileName : string | null = null) : string {
  let path : string | null = null;
  if (fileName === null) {
    fileName = prefix + randomBytes(32).toString('hex') + ext;
  }
  path = join(TMP, fileName);
  return path;
}

module.exports = { tmp };

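// Usage sketch (illustration, not in this commit): with no fileName given,
// the helper returns a unique path in the OS temp dir, e.g.
// /tmp/3d-tmp-<64 hex chars>.mov on Linux.
//
// const scratch: string = tmp('.mov');
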
@@ -5,22 +5,22 @@ import { resolve, basename, extname } from 'path';
// S3 configuration interface
interface S3Config {
  region: string;
  bucketName: string;
  expirationSeconds: number;
  endpoint?: string;
  credentials?: {
    accessKeyId: string;
    secretAccessKey: string;
  };
  forcePathStyle?: boolean;
}

// Function response interface
interface UploadResult {
  success: boolean;
  url?: string;
  key?: string;
  error?: string;
}

/**

@@ -30,78 +30,78 @@ interface UploadResult {
 * @returns Promise containing the result with URL if successful
 */
export async function upload (id: string, filePath: string, config: S3Config): Promise<UploadResult> {
  const fullPath : string = resolve(filePath);
  const fileName : string = basename(filePath);
  const extName : string = extname(filePath);
  const key: string = `${id}${extName}`;

  if (!key) {
    return {
      success: false,
      error: 'Could not create key'
    };
  }

  let fileStream : ReadStream;

  try {
    fileStream = createReadStream(fullPath);
  } catch (err) {
    const error: Error = err as Error;
    return {
      success: false,
      error: `Error reading file: ${error.message}`
    };
  }

  const s3ClientOptions: Record<string, any> = {
    region: config.region
  };

  if (config.endpoint) {
    s3ClientOptions.endpoint = config.endpoint;
  }

  if (config.credentials) {
    s3ClientOptions.credentials = config.credentials;
  }

  if (config.forcePathStyle !== undefined) {
    s3ClientOptions.forcePathStyle = config.forcePathStyle;
  }

  const s3Client: S3Client = new S3Client(s3ClientOptions);

  try {
    const uploadCommand: PutObjectCommand = new PutObjectCommand({
      Bucket: config.bucketName,
      Key: key,
      Body: fileStream
    });

    await s3Client.send(uploadCommand);

    const getCommand: GetObjectCommand = new GetObjectCommand({
      Bucket: config.bucketName,
      Key: key
    });

    const url : string = await getSignedUrl(s3Client, getCommand, {
      expiresIn: config.expirationSeconds
    });

    return {
      success: true,
      url,
      key
    };
  } catch (err) {
    const error : Error = err as Error;
    return {
      success: false,
      error: `Error uploading to S3: ${error.message}`
    };
  }
}

module.exports = { upload };

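// Usage sketch (illustration, not in this commit): the bucket name and region
// below are placeholders; the returned url is a presigned GET link that
// expires after config.expirationSeconds.
//
// const result: UploadResult = await upload('abc123', '/tmp/demo.mov', {
//   region: 'us-east-1',
//   bucketName: 'example-bucket',
//   expirationSeconds: 3600
// });
// if (result.success) console.log(result.url);
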

File diff suppressed because one or more lines are too long

@@ -0,0 +1,7 @@
#lookupForm #first-name {
  display: none;
}

footer {
  text-align: center;
}

File diff suppressed because one or more lines are too long

@@ -0,0 +1,14 @@
<div id="download" class="row justify-content-md-center">
|
||||
{{#if file}}
|
||||
<div class="card col col-lg-6">
|
||||
<div class="card-body">
|
||||
<form action="download" method="POST" id="downloadForm">
|
||||
<div>
|
||||
<h4>{{file.filename}}</h4>
|
||||
</div>
|
||||
<button type="submit" class="btn btn-primary">Download</button>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
{{/if}}
|
||||
</div>
|
|
@ -0,0 +1,16 @@
|
|||
|
||||
<div id="home" class="row justify-content-md-center">
|
||||
<div class="card col col-lg-6">
|
||||
<div class="card-body">
|
||||
<form action="/lookup" method="POST" id="lookupForm">
|
||||
<div class="form-group">
|
||||
<label for="email">Lookup </label>
|
||||
<input type="email" class="form-control" name="email" id="email" aria-describedby="emailHelp" placeholder="Enter email address">
|
||||
<small id="emailHelp" class="form-text text-muted">The email that your file was originally shared to.</small>
|
||||
</div>
|
||||
<input type="text" name="first-name" id="first-name" />
|
||||
<button type="submit" class="btn btn-primary">Submit</button>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
|
@ -0,0 +1,30 @@
|
|||
<!DOCTYPE html>
<html lang="en">

<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <meta http-equiv="X-UA-Compatible" content="ie=edge">
  {{#if title}}
  <title>Icebox - {{title}}</title>
  {{else}}
  <title>Icebox</title>
  {{/if}}
  <link rel="stylesheet" href="/css/bootstrap.min.css" />
  <link rel="stylesheet" href="/css/style.css" />
</head>

<body>

  <div id="main" class="container-fluid">
    {{> nav}}
    <div style="height: 80px;"></div>
    {{#if alert}}
    {{> alert}}
    {{/if}}
    {{{body}}}
  </div>
  {{> footer}}
  <script src="/js/bootstrap.min.js"></script>
</body>

</html>

@@ -0,0 +1,3 @@
<div class="alert alert-{{alert.class}}" role="alert">
|
||||
{{alert.message}}
|
||||
</div>
|
|
@ -0,0 +1,5 @@
|
|||
<footer class="container-fluid">
  <p class="small">
    © <script>document.write(/\d{4}/.exec(Date())[0])</script>
  </p>
</footer>

@@ -0,0 +1,29 @@
<nav class="navbar fixed-top navbar-expand-md navbar-dark bg-primary mb-3">
  <div class="flex-row d-flex">
    <button type="button" class="navbar-toggler mr-2" data-toggle="offcanvas" title="Toggle responsive left sidebar">
      <span class="navbar-toggler-icon"></span>
    </button>
    <a class="navbar-brand" href="/" title="Home">Icebox</a>
  </div>
  <button class="navbar-toggler" type="button" data-toggle="collapse" data-target="#collapsingNavbar">
    <span class="navbar-toggler-icon"></span>
  </button>
  <div class="navbar-collapse collapse" id="collapsingNavbar">
    <ul class="navbar-nav">
      <li class="nav-item active">
        <a class="nav-link" href="/">Home <span class="sr-only">Home</span></a>
      </li>
      <!--
      links
      -->
    </ul>
    <ul class="navbar-nav ml-auto">
      <!--<li class="nav-item">
        <a class="nav-link" href="#myAlert" data-toggle="collapse">Alert</a>
      </li>
      <li class="nav-item">
        <a class="nav-link" href="" data-target="#myModal" data-toggle="modal">About</a>
      </li>-->
    </ul>
  </div>
</nav>