Initial commit

This commit is contained in:
Matt McWilliams 2025-03-29 21:51:50 -04:00
commit ae3f7d9c0a
30 changed files with 6056 additions and 0 deletions

3
.gitignore vendored Normal file
View File

@ -0,0 +1,3 @@
node_modules
.env
watch/*

323
client/index.ts Normal file
View File

@ -0,0 +1,323 @@
// client.ts - Browser client for WebRTC file transfers
import { io, Socket } from 'socket.io-client';
/**
 * Browser-side receiver for WebRTC file transfers.
 *
 * The server initiates everything: it sends an SDP offer (with file
 * metadata) over Socket.IO, creates the data channel, and streams the
 * file in chunks. This class answers the offer, buffers the incoming
 * chunks, and surfaces progress/completion/errors through registered
 * callbacks.
 */
export class WebRTCFileClient {
  private socket: Socket;
  private peerConnection: RTCPeerConnection | null = null;
  private dataChannel: RTCDataChannel | null = null;
  // Bytes received so far for the current transfer.
  private receivedSize: number = 0;
  // Expected file size in bytes, taken from the server's file metadata.
  private fileSize: number = 0;
  private fileName: string = '';
  // Chunks in arrival order; concatenated into a Blob on completion.
  private fileChunks: Uint8Array[] = [];
  private onProgressCallback: ((progress: number) => void) | null = null;
  private onCompleteCallback: ((file: Blob, fileName: string) => void) | null = null;
  private onErrorCallback: ((error: string) => void) | null = null;
  private onFilesListCallback: ((files: any[]) => void) | null = null;

  constructor(serverUrl: string = window.location.origin) {
    this.socket = io(serverUrl);
    this.setupSocketListeners();
  }

  /**
   * Set up Socket.IO event listeners (the signaling channel).
   */
  private setupSocketListeners(): void {
    this.socket.on('connect', () => {
      console.log('Connected to server, socket ID:', this.socket.id);
    });
    // The server pushes the offer; file metadata rides along with the SDP.
    this.socket.on('offer', (data: { sdp: RTCSessionDescription, fileInfo: { name: string, size: number } }) => {
      console.log('Received offer from server');
      this.fileName = data.fileInfo.name;
      this.fileSize = data.fileInfo.size;
      this.setupPeerConnection();
      this.handleOffer(data.sdp);
    });
    // Trickle ICE: add each remote candidate as it arrives.
    this.socket.on('ice-candidate', (candidate: RTCIceCandidate) => {
      if (this.peerConnection) {
        this.peerConnection.addIceCandidate(new RTCIceCandidate(candidate))
          .catch(err => console.error('Error adding received ice candidate', err));
      }
    });
    this.socket.on('error', (errorMessage: string) => {
      console.error('Server error:', errorMessage);
      if (this.onErrorCallback) {
        this.onErrorCallback(errorMessage);
      }
    });
    this.socket.on('files-list', (files: any[]) => {
      console.log('Received files list:', files);
      if (this.onFilesListCallback) {
        this.onFilesListCallback(files);
      }
    });
    // Losing the signaling socket abandons any in-flight transfer.
    this.socket.on('disconnect', () => {
      console.log('Disconnected from server');
      this.cleanupFileTransfer();
    });
  }

  /**
   * Set up WebRTC peer connection. Also resets per-transfer state, so any
   * transfer that was in progress is discarded.
   */
  private setupPeerConnection(): void {
    // Close any existing connection
    if (this.peerConnection) {
      this.peerConnection.close();
    }
    // Configure ICE servers (STUN/TURN)
    const configuration = {
      iceServers: [
        { urls: 'stun:stun.l.google.com:19302' },
        { urls: 'stun:stun1.l.google.com:19302' }
      ]
    };
    this.peerConnection = new RTCPeerConnection(configuration);
    this.fileChunks = [];
    this.receivedSize = 0;
    // Relay our local ICE candidates to the server over the signaling socket
    this.peerConnection.onicecandidate = (event) => {
      if (event.candidate) {
        this.socket.emit('ice-candidate', event.candidate);
      }
    };
    // The server creates the data channel; adopt it when it arrives
    this.peerConnection.ondatachannel = (event) => {
      this.dataChannel = event.channel;
      this.setupDataChannel();
    };
  }

  /**
   * Set up the data channel for file transfer. The channel carries two kinds
   * of messages: JSON strings (file-info / file-complete / error control
   * messages) and raw ArrayBuffer file chunks.
   */
  private setupDataChannel(): void {
    if (!this.dataChannel) return;
    this.dataChannel.binaryType = 'arraybuffer';
    this.dataChannel.onopen = () => {
      console.log('Data channel is open');
    };
    this.dataChannel.onmessage = (event) => {
      const data = event.data;
      // Handle JSON messages
      if (typeof data === 'string') {
        try {
          const message = JSON.parse(data);
          if (message.type === 'file-info') {
            // Reset for new file
            this.fileName = message.name;
            this.fileSize = message.size;
            this.fileChunks = [];
            this.receivedSize = 0;
            console.log(`Receiving file: ${this.fileName}, Size: ${this.formatFileSize(this.fileSize)}`);
          }
          else if (message.type === 'file-complete') {
            this.completeFileTransfer();
          }
          else if (message.type === 'error') {
            if (this.onErrorCallback) {
              this.onErrorCallback(message.message);
            }
          }
        } catch (e) {
          console.error('Error parsing message:', e);
        }
      }
      // Handle binary data (file chunks)
      else if (data instanceof ArrayBuffer) {
        this.fileChunks.push(new Uint8Array(data));
        this.receivedSize += data.byteLength;
        // Update progress (clamped to 100 in case sizes disagree)
        if (this.onProgressCallback && this.fileSize > 0) {
          const progress = Math.min((this.receivedSize / this.fileSize) * 100, 100);
          this.onProgressCallback(progress);
        }
      }
    };
    this.dataChannel.onclose = () => {
      console.log('Data channel closed');
    };
    this.dataChannel.onerror = (error) => {
      console.error('Data channel error:', error);
      if (this.onErrorCallback) {
        this.onErrorCallback('Data channel error');
      }
    };
  }

  /**
   * Handle the WebRTC offer from the server: apply it as the remote
   * description, create an answer, and send it back over the socket.
   */
  private async handleOffer(sdp: RTCSessionDescription): Promise<void> {
    if (!this.peerConnection) return;
    try {
      await this.peerConnection.setRemoteDescription(new RTCSessionDescription(sdp));
      const answer = await this.peerConnection.createAnswer();
      await this.peerConnection.setLocalDescription(answer);
      this.socket.emit('answer', answer);
    } catch (error) {
      console.error('Error handling offer:', error);
      if (this.onErrorCallback) {
        this.onErrorCallback('Failed to establish connection');
      }
    }
  }

  /**
   * Complete the file transfer process: concatenate the buffered chunks into
   * a single Blob, hand it to the completion callback, and release resources.
   */
  private completeFileTransfer(): void {
    // Combine file chunks
    if (this.fileChunks.length === 0) {
      console.error('No file data received');
      if (this.onErrorCallback) {
        this.onErrorCallback('No file data received');
      }
      return;
    }
    // Calculate total size
    let totalLength = 0;
    for (const chunk of this.fileChunks) {
      totalLength += chunk.length;
    }
    // Create a single Uint8Array with all data
    const completeFile = new Uint8Array(totalLength);
    let offset = 0;
    for (const chunk of this.fileChunks) {
      completeFile.set(chunk, offset);
      offset += chunk.length;
    }
    // Create Blob from the complete file data
    const fileBlob = new Blob([completeFile]);
    console.log(`File transfer complete: ${this.fileName}, Size: ${this.formatFileSize(fileBlob.size)}`);
    // Trigger completion callback
    if (this.onCompleteCallback) {
      this.onCompleteCallback(fileBlob, this.fileName);
    }
    // Clean up resources
    this.cleanupFileTransfer();
  }

  /**
   * Clean up resources used for file transfer: drop buffered chunks and
   * close the data channel and peer connection.
   */
  private cleanupFileTransfer(): void {
    this.fileChunks = [];
    this.receivedSize = 0;
    if (this.dataChannel) {
      this.dataChannel.close();
      this.dataChannel = null;
    }
    if (this.peerConnection) {
      this.peerConnection.close();
      this.peerConnection = null;
    }
  }

  /**
   * Format file size for display (e.g. 1536 -> "1.5 KB").
   */
  private formatFileSize(bytes: number): string {
    if (bytes === 0) return '0 Bytes';
    const k = 1024;
    const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
    const i = Math.floor(Math.log(bytes) / Math.log(k));
    return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
  }

  /**
   * Request the list of available files from the server
   * (result arrives via the onFilesList callback).
   */
  public getFilesList(): void {
    this.socket.emit('get-files');
  }

  /**
   * Request a specific file from the server; the server responds with an
   * offer and streams the file over a data channel.
   */
  public requestFile(fileName: string): void {
    this.socket.emit('request-file', fileName);
  }

  /**
   * Set callback for progress updates (percentage, 0-100).
   */
  public onProgress(callback: (progress: number) => void): void {
    this.onProgressCallback = callback;
  }

  /**
   * Set callback for file transfer completion.
   */
  public onComplete(callback: (file: Blob, fileName: string) => void): void {
    this.onCompleteCallback = callback;
  }

  /**
   * Set callback for error handling.
   */
  public onError(callback: (error: string) => void): void {
    this.onErrorCallback = callback;
  }

  /**
   * Set callback for files list.
   */
  public onFilesList(callback: (files: any[]) => void): void {
    this.onFilesListCallback = callback;
  }

  /**
   * Save the received file by triggering a browser download.
   */
  public saveFile(blob: Blob, fileName: string): void {
    // Create a URL for the blob
    const url = URL.createObjectURL(blob);
    // Create an anchor element and trigger download
    const a = document.createElement('a');
    a.href = url;
    a.download = fileName;
    document.body.appendChild(a);
    a.click();
    // Clean up on the next tick, after the click has been processed
    window.setTimeout(() => {
      document.body.removeChild(a);
      URL.revokeObjectURL(url);
    }, 0);
  }

  /**
   * Disconnect from the server and release any transfer resources.
   */
  public disconnect(): void {
    this.socket.disconnect();
    this.cleanupFileTransfer();
  }
}

0
dist/index.d.ts vendored Normal file
View File

272
dist/index.js vendored Normal file
View File

@ -0,0 +1,272 @@
"use strict";
/*
import express from 'express';
import http from 'http';
import path from 'path';
import fs from 'fs';
import { Server as SocketIOServer } from 'socket.io';
import { RTCPeerConnection, RTCSessionDescription, RTCIceCandidate } from 'wrtc';
// Define types
interface FileTransferSession {
peerConnection: RTCPeerConnection;
dataChannel?: RTCDataChannel;
fileStream?: fs.ReadStream;
filePath: string;
fileSize: number;
chunkSize: number;
sentBytes: number;
}
// Initialize express app
const app = express();
const server = http.createServer(app);
const io = new SocketIOServer(server);
const PORT = process.env.PORT || 3000;
// Serve static files
app.use(express.static(path.join(__dirname, 'public')));
// Store active file transfer sessions
const sessions: Map<string, FileTransferSession> = new Map();
// Configure WebRTC ICE servers (STUN/TURN)
const iceServers = [
{ urls: 'stun:stun.l.google.com:19302' },
{ urls: 'stun:stun1.l.google.com:19302' }
];
io.on('connection', (socket) => {
console.log('Client connected:', socket.id);
// Handle request for available files
socket.on('get-files', () => {
const filesDirectory = path.join(__dirname, 'files');
try {
const files = fs.readdirSync(filesDirectory)
.filter(file => fs.statSync(path.join(filesDirectory, file)).isFile())
.map(file => {
const filePath = path.join(filesDirectory, file);
const stats = fs.statSync(filePath);
return {
name: file,
size: stats.size,
modified: stats.mtime
};
});
socket.emit('files-list', files);
} catch (err) {
console.error('Error reading files directory:', err);
socket.emit('error', 'Failed to retrieve files list');
}
});
// Handle file transfer request
socket.on('request-file', (fileName: string) => {
const filePath = path.join(__dirname, 'files', fileName);
// Check if file exists
if (!fs.existsSync(filePath)) {
return socket.emit('error', 'File not found');
}
const fileSize = fs.statSync(filePath).size;
const chunkSize = 16384; // 16KB chunks
// Create and configure peer connection
const peerConnection = new RTCPeerConnection({ iceServers });
// Create data channel
const dataChannel = peerConnection.createDataChannel('fileTransfer', {
ordered: true
});
// Store session info
sessions.set(socket.id, {
peerConnection,
dataChannel,
filePath,
fileSize,
chunkSize,
sentBytes: 0
});
// Handle ICE candidates
peerConnection.onicecandidate = (event) => {
if (event.candidate) {
socket.emit('ice-candidate', event.candidate);
}
};
// Set up data channel handlers
dataChannel.onopen = () => {
console.log(`Data channel opened for client ${socket.id}`);
startFileTransfer(socket.id);
};
dataChannel.onclose = () => {
console.log(`Data channel closed for client ${socket.id}`);
cleanupSession(socket.id);
};
dataChannel.onerror = (error) => {
console.error(`Data channel error for client ${socket.id}:`, error);
cleanupSession(socket.id);
};
// Create offer
peerConnection.createOffer()
.then(offer => peerConnection.setLocalDescription(offer))
.then(() => {
socket.emit('offer', {
sdp: peerConnection.localDescription,
fileInfo: {
name: path.basename(filePath),
size: fileSize
}
});
})
.catch(err => {
console.error('Error creating offer:', err);
socket.emit('error', 'Failed to create connection offer');
cleanupSession(socket.id);
});
});
// Handle answer from browser
socket.on('answer', async (answer: RTCSessionDescription) => {
try {
const session = sessions.get(socket.id);
if (!session) return;
await session.peerConnection.setRemoteDescription(new RTCSessionDescription(answer));
console.log(`Connection established with client ${socket.id}`);
} catch (err) {
console.error('Error setting remote description:', err);
socket.emit('error', 'Failed to establish connection');
cleanupSession(socket.id);
}
});
// Handle ICE candidates from browser
socket.on('ice-candidate', async (candidate: RTCIceCandidate) => {
try {
const session = sessions.get(socket.id);
if (!session) return;
await session.peerConnection.addIceCandidate(new RTCIceCandidate(candidate));
} catch (err) {
console.error('Error adding ICE candidate:', err);
}
});
// Handle client disconnection
socket.on('disconnect', () => {
console.log('Client disconnected:', socket.id);
cleanupSession(socket.id);
});
});
// Start file transfer
function startFileTransfer(socketId: string): void {
const session = sessions.get(socketId);
if (!session || !session.dataChannel) return;
// Send file info first
session.dataChannel.send(JSON.stringify({
type: 'file-info',
name: path.basename(session.filePath),
size: session.fileSize
}));
// Open file stream
session.fileStream = fs.createReadStream(session.filePath, {
highWaterMark: session.chunkSize
});
// Process file chunks
session.fileStream.on('data', (chunk: Buffer) => {
// Check if data channel is still open and ready
if (session.dataChannel?.readyState === 'open') {
// Pause the stream to handle backpressure
session.fileStream?.pause();
// Send chunk as ArrayBuffer
session.dataChannel.send(chunk);
session.sentBytes += chunk.length;
// Report progress
if (session.sentBytes % (5 * 1024 * 1024) === 0) { // Every 5MB
console.log(`Sent ${session.sentBytes / (1024 * 1024)}MB of ${session.fileSize / (1024 * 1024)}MB`);
}
// Check buffer status before resuming
const bufferAmount = session.dataChannel.bufferedAmount;
if (bufferAmount < session.chunkSize * 2) {
// Resume reading if buffer is below threshold
session.fileStream?.resume();
} else {
// Wait for buffer to drain
const checkBuffer = setInterval(() => {
if (session.dataChannel?.bufferedAmount < session.chunkSize) {
clearInterval(checkBuffer);
session.fileStream?.resume();
}
}, 100);
}
}
});
// Handle end of file
session.fileStream.on('end', () => {
if (session.dataChannel?.readyState === 'open') {
session.dataChannel.send(JSON.stringify({ type: 'file-complete' }));
console.log(`File transfer complete for client ${socketId}`);
}
});
// Handle file stream errors
session.fileStream.on('error', (err) => {
console.error(`File stream error for client ${socketId}:`, err);
if (session.dataChannel?.readyState === 'open') {
session.dataChannel.send(JSON.stringify({
type: 'error',
message: 'File read error on server'
}));
}
cleanupSession(socketId);
});
}
// Clean up session resources
function cleanupSession(socketId: string): void {
const session = sessions.get(socketId);
if (!session) return;
if (session.fileStream) {
session.fileStream.destroy();
}
if (session.dataChannel && session.dataChannel.readyState === 'open') {
session.dataChannel.close();
}
session.peerConnection.close();
sessions.delete(socketId);
console.log(`Cleaned up session for client ${socketId}`);
}
// Start the server
server.listen(PORT, () => {
console.log(`Server running on port ${PORT}`);
// Create files directory if it doesn't exist
const filesDir = path.join(__dirname, 'files');
if (!fs.existsSync(filesDir)) {
fs.mkdirSync(filesDir);
console.log('Created files directory');
}
});
*/
//# sourceMappingURL=index.js.map

1
dist/index.js.map vendored Normal file
View File

@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EA6QE"}

9
dist/log/index.d.ts vendored Normal file
View File

@ -0,0 +1,9 @@
/**
* Returns a winston logger configured to service
*
* @param {string} label Label appearing on logger
* @param {string} filename Optional file to write log to
*
* @returns {object} Winston logger
*/
export declare function createLog(label: string, filename?: string | null): import("winston").Logger;

48
dist/log/index.js vendored Normal file
View File

@ -0,0 +1,48 @@
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.createLog = createLog;
/** @module log */
/** Wrapper for winston that tags streams and optionally writes files with a simple interface. */
/** Module now also supports optional papertrail integration, other services to follow */
const winston_1 = require("winston");
const { SPLAT } = require('triple-beam');
const { isObject } = require('lodash');
// Application identifier; overridable via the APP_NAME environment variable.
const APP_NAME = process.env.APP_NAME || 'default';
// Stringify objects so they render usefully in a log line; pass primitives through.
function formatObject(param) {
    if (isObject(param)) {
        return JSON.stringify(param);
    }
    return param;
}
// Custom format: folds any splat (extra) arguments into the message string.
const all = (0, winston_1.format)((info) => {
    const splat = info[SPLAT] || [];
    const message = formatObject(info.message);
    const rest = splat.map(formatObject).join(' ');
    info.message = `${message} ${rest}`;
    return info;
});
// Final line layout: "timestamp [label] level: message".
const myFormat = winston_1.format.printf(({ level, message, label, timestamp }) => {
    return `${timestamp} [${label}] ${level}: ${message}`;
});
/**
 * Returns a winston logger configured to service
 *
 * @param {string} label Label appearing on logger
 * @param {string} filename Optional file to write log to
 *
 * @returns {object} Winston logger
 */
function createLog(label, filename = null) {
    // Always log to the console; add a file transport when a filename is given.
    const tports = [new (winston_1.transports.Console)()];
    const fmat = winston_1.format.combine(all(), winston_1.format.label({ label }), winston_1.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss.SSS' }), winston_1.format.colorize(), myFormat);
    if (filename !== null) {
        tports.push(new (winston_1.transports.File)({ filename }));
    }
    return (0, winston_1.createLogger)({
        format: fmat,
        transports: tports
    });
}
module.exports = { createLog };
//# sourceMappingURL=index.js.map

1
dist/log/index.js.map vendored Normal file
View File

@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/log/index.ts"],"names":[],"mappings":"AAAA,YAAY,CAAA;;AAuCZ,8BAmBC;AAxDD,kBAAkB;AAClB,iGAAiG;AACjG,yFAAyF;AAEzF,qCAA2D;AAC3D,MAAM,EAAE,KAAK,EAAE,GAAG,OAAO,CAAC,aAAa,CAAC,CAAC;AACzC,MAAM,EAAE,QAAQ,EAAE,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;AAEvC,MAAM,QAAQ,GAAY,OAAO,CAAC,GAAG,CAAC,QAAQ,IAAI,SAAS,CAAC;AAE5D,SAAS,YAAY,CAAE,KAAW;IAChC,IAAI,QAAQ,CAAC,KAAK,CAAC,EAAE,CAAC;QACpB,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;IAC/B,CAAC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,MAAM,GAAG,GAAG,IAAA,gBAAM,EAAC,CAAC,IAAU,EAAE,EAAE;IAC9B,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC;IAChC,MAAM,OAAO,GAAG,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAC3C,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC/C,IAAI,CAAC,OAAO,GAAG,GAAG,OAAO,IAAI,IAAI,EAAE,CAAC;IACpC,OAAO,IAAI,CAAC;AAChB,CAAC,CAAC,CAAC;AAEH,MAAM,QAAQ,GAAG,gBAAM,CAAC,MAAM,CAAC,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,KAAK,EAAE,SAAS,EAAQ,EAAE,EAAE;IAC5E,OAAO,GAAG,SAAS,KAAK,KAAK,KAAK,KAAK,KAAK,OAAO,EAAE,CAAC;AACxD,CAAC,CAAC,CAAC;AAEH;;;;;;;EAOE;AACF,SAAgB,SAAS,CAAE,KAAc,EAAE,WAA2B,IAAI;IACtE,MAAM,MAAM,GAAW,CAAE,IAAI,CAAC,oBAAU,CAAC,OAAO,CAAC,EAAE,CAAE,CAAC;IACtD,MAAM,IAAI,GAAS,gBAAM,CAAC,OAAO,CAC7B,GAAG,EAAE,EACL,gBAAM,CAAC,KAAK,CAAC,EAAE,KAAK,EAAE,CAAC,EACvB,gBAAM,CAAC,SAAS,CAAC,EAAC,MAAM,EAAE,yBAAyB,EAAC,CAAC,EACrD,gBAAM,CAAC,QAAQ,EAAE,EACjB,QAAQ,CACX,CAAC;IACF,IAAI,cAAoB,CAAC;IAEzB,IAAI,QAAQ,KAAK,IAAI,EAAE,CAAC;QACpB,MAAM,CAAC,IAAI,CAAE,IAAI,CAAC,oBAAU,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,EAAE,CAAC,CAAE,CAAC;IACvD,CAAC;IAED,OAAO,IAAA,sBAAY,EAAC;QAChB,MAAM,EAAG,IAAI;QACb,UAAU,EAAG,MAAM;KACtB,CAAC,CAAC;AACP,CAAC;AAED,MAAM,CAAC,OAAO,GAAG,EAAE,SAAS,EAAE,CAAC"}

1
dist/monitor/index.d.ts vendored Normal file
View File

@ -0,0 +1 @@
export {};

33
dist/monitor/index.js vendored Normal file
View File

@ -0,0 +1,33 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const upload_1 = require("../upload");
const log_1 = require("../log");
const chokidar_1 = __importDefault(require("chokidar"));
// Presigned-URL lifetime handed to the uploader, in seconds.
const EXPIRATION = 3600; //1 hour
const log = (0, log_1.createLog)('files');
/**
 * Upload a single file to S3 and log the outcome.
 * NOTE(review): region/bucket are hard-coded placeholders — confirm real
 * values (or move them to configuration) before deploying.
 */
async function processUpload(filePath) {
    const config = {
        region: 'us-east-1',
        bucketName: 'your-bucket-name',
        expirationSeconds: EXPIRATION
    };
    log.info(`Started upload: ${filePath}`);
    const result = await (0, upload_1.upload)('test', filePath, config);
    if (result.success) {
        // Bug fix: this was a single-quoted string, so "${filePath}" was
        // logged literally instead of being interpolated.
        log.info(`File ${filePath} uploaded successfully!`);
        log.info('Private URL:', result.url);
    }
    else {
        log.error('Upload failed:', result.error);
    }
}
/**
 * Watch the ./watch directory (ignoring dotfiles) and log change events.
 * NOTE(review): processUpload is defined but never invoked from the watcher —
 * presumably 'add'/'change' events should trigger it; confirm intent.
 */
async function main() {
    chokidar_1.default.watch('./watch', { ignored: /(^|[/\\])\../ }).on('all', (event, path) => {
        log.info(`File ${path} changed with event type ${event}`);
    });
}
main().catch(log.error);
//# sourceMappingURL=index.js.map

1
dist/monitor/index.js.map vendored Normal file
View File

@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/monitor/index.ts"],"names":[],"mappings":";;;;;AAAA,sCAAmC;AACnC,gCAAmC;AAEnC,wDAAgC;AAIhC,MAAM,UAAU,GAAY,IAAI,CAAC,CAAC,QAAQ;AAE1C,MAAM,GAAG,GAAY,IAAA,eAAS,EAAC,OAAO,CAAC,CAAC;AAExC,KAAK,UAAU,aAAa,CAAE,QAAiB;IAC9C,MAAM,MAAM,GAAc;QACzB,MAAM,EAAE,WAAW;QACnB,UAAU,EAAE,kBAAkB;QAC9B,iBAAiB,EAAE,UAAU;KAC7B,CAAC;IAEF,GAAG,CAAC,IAAI,CAAC,mBAAmB,QAAQ,EAAE,CAAC,CAAC;IACxC,MAAM,MAAM,GAAkB,MAAM,IAAA,eAAM,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAC;IAErE,IAAI,MAAM,CAAC,OAAO,EAAE,CAAC;QACpB,GAAG,CAAC,IAAI,CAAC,yCAAyC,CAAC,CAAC;QACpD,GAAG,CAAC,IAAI,CAAC,cAAc,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC;IACtC,CAAC;SAAM,CAAC;QACP,GAAG,CAAC,KAAK,CAAC,gBAAgB,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;IAC3C,CAAC;AACF,CAAC;AAED,KAAK,UAAU,IAAI;IAClB,kBAAQ,CAAC,KAAK,CAAC,SAAS,EAAE,EAAE,OAAO,EAAE,cAAc,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,EAAE,CAAC,KAAW,EAAE,IAAa,EAAE,EAAE;QAC/F,GAAG,CAAC,IAAI,CAAC,QAAQ,IAAI,4BAA4B,KAAK,EAAE,CAAC,CAAC;IAC3D,CAAC,CAAC,CAAC;AACJ,CAAC;AAED,IAAI,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC"}

19
dist/upload/index.d.ts vendored Normal file
View File

@ -0,0 +1,19 @@
interface S3Config {
region: string;
bucketName: string;
expirationSeconds: number;
}
interface UploadResult {
success: boolean;
url?: string;
key?: string;
error?: string;
}
/**
* Uploads a file to S3 and returns a private, time-limited URL to access it
* @param filePath - Absolute or relative path to the file to upload
* @param config - S3 configuration parameters
* @returns Promise containing the result with URL if successful
*/
export declare function upload(id: string, filePath: string, config: S3Config): Promise<UploadResult>;
export type { UploadResult, S3Config };

68
dist/upload/index.js vendored Normal file
View File

@ -0,0 +1,68 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.upload = upload;
const client_s3_1 = require("@aws-sdk/client-s3");
const s3_request_presigner_1 = require("@aws-sdk/s3-request-presigner");
const fs_1 = require("fs");
const path_1 = require("path");
/**
 * Uploads a file to S3 and returns a private, time-limited URL to access it
 * @param id - Identifier that (with the file's extension) forms the object key
 * @param filePath - Absolute or relative path to the file to upload
 * @param config - S3 configuration parameters (region, bucket, URL expiry)
 * @returns Promise containing the result with URL if successful
 */
async function upload(id, filePath, config) {
    const fullPath = (0, path_1.resolve)(filePath);
    const extName = (0, path_1.extname)(filePath);
    const key = `${id}${extName}`;
    // Guards against an empty id with no extension producing an empty key.
    if (!key) {
        return {
            success: false,
            error: 'Could not create key'
        };
    }
    let fileStream;
    try {
        // Bug fix: createReadStream never throws synchronously for a missing or
        // unreadable file — the failure arrives later as an async 'error' event
        // that escaped this try/catch and could crash the process. Validate
        // readability up front so the failure is reported via UploadResult.
        (0, fs_1.accessSync)(fullPath, fs_1.constants.R_OK);
        fileStream = (0, fs_1.createReadStream)(fullPath);
    }
    catch (err) {
        const error = err;
        return {
            success: false,
            error: `Error reading file: ${error.message}`
        };
    }
    const s3Client = new client_s3_1.S3Client({
        region: config.region
    });
    try {
        // Upload the object, then presign a GET so the caller can share a
        // private, expiring link to it.
        const uploadCommand = new client_s3_1.PutObjectCommand({
            Bucket: config.bucketName,
            Key: key,
            Body: fileStream
        });
        await s3Client.send(uploadCommand);
        const getCommand = new client_s3_1.GetObjectCommand({
            Bucket: config.bucketName,
            Key: key
        });
        const url = await (0, s3_request_presigner_1.getSignedUrl)(s3Client, getCommand, {
            expiresIn: config.expirationSeconds
        });
        return {
            success: true,
            url,
            key
        };
    }
    catch (err) {
        const error = err;
        return {
            success: false,
            error: `Error uploading to S3: ${error.message}`
        };
    }
}
module.exports = { upload };
//# sourceMappingURL=index.js.map

1
dist/upload/index.js.map vendored Normal file
View File

@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/upload/index.ts"],"names":[],"mappings":";;AA0BA,wBA2DC;AArFD,kDAAkF;AAClF,wEAA6D;AAC7D,2BAAkD;AAClD,+BAAkD;AAiBlD;;;;;GAKG;AACI,KAAK,UAAU,MAAM,CAAE,EAAU,EAAE,QAAgB,EAAE,MAAgB;IAC3E,MAAM,QAAQ,GAAY,IAAA,cAAO,EAAC,QAAQ,CAAC,CAAC;IAC5C,MAAM,QAAQ,GAAY,IAAA,eAAQ,EAAC,QAAQ,CAAC,CAAC;IAC7C,MAAM,OAAO,GAAa,IAAA,cAAO,EAAC,QAAQ,CAAC,CAAC;IAC5C,MAAM,GAAG,GAAW,GAAG,EAAE,GAAG,OAAO,EAAE,CAAC;IAEtC,IAAI,CAAC,GAAG,EAAE,CAAC;QACV,OAAO;YACN,OAAO,EAAE,KAAK;YACd,KAAK,EAAE,sBAAsB;SAC7B,CAAC;IACH,CAAC;IAED,IAAI,UAAuB,CAAC;IAE5B,IAAI,CAAC;QACJ,UAAU,GAAG,IAAA,qBAAgB,EAAC,QAAQ,CAAC,CAAC;IACzC,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACd,MAAM,KAAK,GAAU,GAAY,CAAC;QAClC,OAAO;YACN,OAAO,EAAE,KAAK;YACd,KAAK,EAAE,uBAAuB,KAAK,CAAC,OAAO,EAAE;SAC7C,CAAC;IACH,CAAC;IAED,MAAM,QAAQ,GAAa,IAAI,oBAAQ,CAAC;QACvC,MAAM,EAAE,MAAM,CAAC,MAAM;KACrB,CAAC,CAAC;IAEH,IAAI,CAAC;QACJ,MAAM,aAAa,GAAqB,IAAI,4BAAgB,CAAC;YAC5D,MAAM,EAAE,MAAM,CAAC,UAAU;YACzB,GAAG,EAAE,GAAG;YACR,IAAI,EAAE,UAAU;SAChB,CAAC,CAAC;QAEH,MAAM,QAAQ,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;QAEnC,MAAM,UAAU,GAAqB,IAAI,4BAAgB,CAAC;YACzD,MAAM,EAAE,MAAM,CAAC,UAAU;YACzB,GAAG,EAAE,GAAG;SACR,CAAC,CAAC;QAEH,MAAM,GAAG,GAAW,MAAM,IAAA,mCAAY,EAAC,QAAQ,EAAE,UAAU,EAAE;YAC5D,SAAS,EAAE,MAAM,CAAC,iBAAiB;SACnC,CAAC,CAAC;QAEH,OAAO;YACN,OAAO,EAAE,IAAI;YACb,GAAG;YACH,GAAG;SACH,CAAC;IACH,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACd,MAAM,KAAK,GAAW,GAAY,CAAC;QACnC,OAAO;YACN,OAAO,EAAE,KAAK;YACd,KAAK,EAAE,0BAA0B,KAAK,CAAC,OAAO,EAAE;SAChD,CAAC;IACH,CAAC;AACF,CAAC;AAED,MAAM,CAAC,OAAO,GAAG,EAAE,MAAM,EAAE,CAAC"}

14
notes.txt Normal file
View File

@ -0,0 +1,14 @@
WARNING: live-looking Backblaze B2 credentials are committed below — rotate these keys and move them into .env (already gitignored).
keyID:
000e2ee4d941d2c0000000001
keyName:
ICEBOX
applicationKey:
K00090tlkgT+zaH9q+i3DdqId3A+G2I
s3.us-west-000.backblazeb2.com
6e62ee5e043d49d4913d021c

267
notes/s3_simulate.py Normal file
View File

@ -0,0 +1,267 @@
import datetime
from dataclasses import dataclass
from typing import List, Dict, Tuple
@dataclass
class S3PricingConfig:
    """Provider pricing table consumed by S3PricingSimulator.

    Every tier list holds (tier_min, tier_max, price_per_unit) triples in
    ascending order; the units falling between tier_min and tier_max are
    billed at that tier's rate.
    """
    # Storage pricing tiers in GB (tier_min, tier_max, price_per_gb_month)
    storage_tiers: List[Tuple[float, float, float]]
    # Data transfer pricing in/out in GB (tier_min, tier_max, price_per_gb)
    transfer_in_tiers: List[Tuple[float, float, float]]
    transfer_out_tiers: List[Tuple[float, float, float]]
    # Request pricing (per 1000 requests)
    put_request_price: float
    get_request_price: float
    delete_request_price: float
@dataclass
class S3Usage:
    """One stored object's lifecycle: its size, when it was uploaded, how
    many times it is downloaded, and (optionally) when it is deleted."""
    file_size_gb: float
    upload_date: datetime.date
    download_count: int
    # NOTE(review): annotation should be Optional[datetime.date] since None is a valid value.
    delete_date: datetime.date = None # None means file is not deleted
class S3PricingSimulator:
    """Estimates storage, transfer, and request costs for a set of S3Usage
    items under a tiered S3PricingConfig."""

    def __init__(self, pricing_config: "S3PricingConfig"):
        self.config = pricing_config

    def calculate_storage_cost(self, usage_list: List["S3Usage"], start_date: datetime.date, end_date: datetime.date) -> Dict:
        """Calculate storage costs for the given time period across all usage items.

        Walks the period day by day (both endpoints inclusive), sums the GB
        stored on each day, then bills each calendar month at the tiered rate
        applied to that month's average daily storage. Partial months are
        billed at the full monthly rate on their average — an approximation.
        """
        # Track total GB stored per day
        daily_storage = {}
        current_date = start_date
        while current_date <= end_date:
            daily_storage[current_date] = 0
            for usage in usage_list:
                # A file occupies storage from upload_date up to (but not
                # including) delete_date; delete_date None means never deleted.
                if usage.upload_date <= current_date and (usage.delete_date is None or usage.delete_date > current_date):
                    daily_storage[current_date] += usage.file_size_gb
            current_date += datetime.timedelta(days=1)
        # Group the daily totals by calendar month ("YYYY-MM")
        monthly_storage = {}
        for date, storage_gb in daily_storage.items():
            month_key = f"{date.year}-{date.month:02d}"
            if month_key not in monthly_storage:
                monthly_storage[month_key] = []
            monthly_storage[month_key].append(storage_gb)
        # Price each month on its average stored GB
        storage_costs = {}
        for month, daily_gb_values in monthly_storage.items():
            avg_gb = sum(daily_gb_values) / len(daily_gb_values)
            storage_costs[month] = self._calculate_tiered_cost(avg_gb, self.config.storage_tiers)
        return {
            "daily_storage_gb": daily_storage,
            "monthly_avg_storage_gb": {month: sum(days) / len(days) for month, days in monthly_storage.items()},
            "monthly_storage_cost": storage_costs,
            "total_storage_cost": sum(storage_costs.values())
        }

    def calculate_transfer_costs(self, usage_list: List["S3Usage"]) -> Dict:
        """Calculate data transfer costs: each file is transferred in once
        (upload) and transferred out download_count times."""
        total_transfer_in = 0
        total_transfer_out = 0
        for usage in usage_list:
            total_transfer_in += usage.file_size_gb
            total_transfer_out += usage.file_size_gb * usage.download_count
        transfer_in_cost = self._calculate_tiered_cost(total_transfer_in, self.config.transfer_in_tiers)
        transfer_out_cost = self._calculate_tiered_cost(total_transfer_out, self.config.transfer_out_tiers)
        return {
            "total_transfer_in_gb": total_transfer_in,
            "total_transfer_out_gb": total_transfer_out,
            "transfer_in_cost": transfer_in_cost,
            "transfer_out_cost": transfer_out_cost,
            "total_transfer_cost": transfer_in_cost + transfer_out_cost
        }

    def calculate_request_costs(self, usage_list: List["S3Usage"]) -> Dict:
        """Calculate request costs: 1 PUT per file, download_count GETs, and
        1 DELETE for every file that has a delete_date."""
        put_requests = len(usage_list)
        get_requests = sum(usage.download_count for usage in usage_list)
        delete_requests = sum(1 for usage in usage_list if usage.delete_date is not None)
        # Request prices are quoted per 1000 requests
        put_cost = (put_requests / 1000) * self.config.put_request_price
        get_cost = (get_requests / 1000) * self.config.get_request_price
        delete_cost = (delete_requests / 1000) * self.config.delete_request_price
        return {
            "put_requests": put_requests,
            "get_requests": get_requests,
            "delete_requests": delete_requests,
            "put_cost": put_cost,
            "get_cost": get_cost,
            "delete_cost": delete_cost,
            "total_request_cost": put_cost + get_cost + delete_cost
        }

    def simulate(self, usage_list: List["S3Usage"], start_date: datetime.date = None, end_date: datetime.date = None) -> Dict:
        """Run a complete simulation with the given usage patterns.

        When omitted, start_date defaults to the earliest upload and end_date
        to the latest delete date (or 30 days past the latest upload if
        nothing is ever deleted).
        """
        if not usage_list:
            return {"error": "No usage items provided."}
        # Determine the simulation time period if not specified
        if start_date is None:
            start_date = min(usage.upload_date for usage in usage_list)
        if end_date is None:
            # Find latest date among delete_dates (considering None as "not deleted")
            latest_delete = max((u.delete_date for u in usage_list if u.delete_date is not None), default=None)
            # If no files are deleted, simulate for one month from the last upload
            if latest_delete is None:
                latest_upload = max(u.upload_date for u in usage_list)
                end_date = latest_upload + datetime.timedelta(days=30)
            else:
                end_date = latest_delete
        # Run the individual cost calculations
        storage_results = self.calculate_storage_cost(usage_list, start_date, end_date)
        transfer_results = self.calculate_transfer_costs(usage_list)
        request_results = self.calculate_request_costs(usage_list)
        # Combine all results
        total_cost = (
            storage_results["total_storage_cost"] +
            transfer_results["total_transfer_cost"] +
            request_results["total_request_cost"]
        )
        return {
            "simulation_period": {
                "start_date": start_date,
                "end_date": end_date
            },
            "storage": storage_results,
            "transfer": transfer_results,
            "requests": request_results,
            "total_cost": total_cost
        }

    def _calculate_tiered_cost(self, amount: float, tiers: List[Tuple[float, float, float]]) -> float:
        """Calculate cost based on tiered pricing.

        tiers holds (tier_min, tier_max, price_per_unit) triples sorted by
        tier_min; the units that fall inside each band are billed at that
        band's rate.

        Bug fix: the previous implementation compared tier_min against the
        already-decremented *remaining* amount, so any usage spanning past
        the first band was silently not billed (e.g. 15 units over bands
        0-10 / 10-20 billed only the first 10). Each band is now compared
        against the original amount.
        """
        if amount <= 0:
            return 0
        total_cost = 0.0
        for tier_min, tier_max, price_per_unit in tiers:
            # Bands that start at or above the total usage contribute nothing
            # (tiers are sorted, so all later bands are empty too).
            if amount <= tier_min:
                break
            tier_amount = min(amount, tier_max) - tier_min
            total_cost += tier_amount * price_per_unit
        return total_cost
# Example usage
def run_example():
# Sample pricing configuration based on approximated AWS S3 Standard pricing
amazon_pricing = S3PricingConfig(
# Storage tiers (GB range min, max, price per GB-month)
storage_tiers=[
(0, 50 * 1024, 0.023), # First 50 TB
(50 * 1024, 450 * 1024, 0.022), # Next 400 TB
(450 * 1024, float('inf'), 0.021) # Over 450 TB
],
# Data transfer in (usually free)
transfer_in_tiers=[
(0, float('inf'), 0.0)
],
# Data transfer out tiers
transfer_out_tiers=[
(0, 1, 0.0), # First 1 GB free
(1, 10 * 1024, 0.09), # Up to 10 TB
(10 * 1024, 50 * 1024, 0.085), # Next 40 TB
(50 * 1024, 150 * 1024, 0.07), # Next 100 TB
(150 * 1024, float('inf'), 0.05) # Over 150 TB
],
# Request pricing (per 1000)
put_request_price=0.005,
get_request_price=0.0004,
delete_request_price=0.0
)
#
backblaze_pricing = S3PricingConfig(
# Storage tiers (GB range min, max, price per GB-month)
storage_tiers=[
(0, 10, 0.023), # First 10 GB
(10, float('inf'), 0.0006)
],
# Data transfer in (usually free)
transfer_in_tiers=[
(0, float('inf'), 0.0)
],
# Data transfer out tiers
transfer_out_tiers=[
(0, float('inf'), 0.01) # Over 150 TB
],
# Request pricing (per 1000)
put_request_price=0.000,
get_request_price=0.0004,
delete_request_price=0.0
)
simulator = S3PricingSimulator(backblaze_pricing)
# Example scenario: large file stored for 90 days with frequent downloads
today = datetime.date.today()
large_file = S3Usage(
file_size_gb=80,
upload_date=today,
download_count=7,
delete_date=today + datetime.timedelta(days=15)
)
# A few smaller files with varying lifetimes
usage_list = [ large_file ] * 50
# Run the simulation
results = simulator.simulate(usage_list)
# Print the results
print("S3 Cost Simulation Results")
print("=========================")
print(f"Period: {results['simulation_period']['start_date']} to {results['simulation_period']['end_date']}")
print("\nStorage Costs:")
for month, cost in results['storage']['monthly_storage_cost'].items():
avg_gb = results['storage']['monthly_avg_storage_gb'][month]
print(f" {month}: {avg_gb:.2f} GB (avg) = ${cost:.2f}")
print(f" Total: ${results['storage']['total_storage_cost']:.2f}")
print("\nData Transfer Costs:")
print(f" In: {results['transfer']['total_transfer_in_gb']:.2f} GB = ${results['transfer']['transfer_in_cost']:.2f}")
print(f" Out: {results['transfer']['total_transfer_out_gb']:.2f} GB = ${results['transfer']['transfer_out_cost']:.2f}")
print(f" Total: ${results['transfer']['total_transfer_cost']:.2f}")
print("\nRequest Costs:")
print(f" PUT: {results['requests']['put_requests']} requests = ${results['requests']['put_cost']:.4f}")
print(f" GET: {results['requests']['get_requests']} requests = ${results['requests']['get_cost']:.4f}")
print(f" DELETE: {results['requests']['delete_requests']} requests = ${results['requests']['delete_cost']:.4f}")
print(f" Total: ${results['requests']['total_request_cost']:.4f}")
print("\nTotal Estimated Cost: ${:.2f}".format(results['total_cost']))
if __name__ == "__main__":
run_example()

3844
package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

38
package.json Normal file
View File

@ -0,0 +1,38 @@
{
"name": "webrtc-file-transfer",
"version": "1.0.0",
"description": "WebRTC large file transfer solution with Node.js and Express",
"main": "dist/index.js",
"scripts": {
"start": "node dist",
"compile": "./node_modules/.bin/tsc -p tsconfig.json",
"compile:client": "./node_modules/.bin/tsc -p tsconfig.browser.json"
},
"keywords": [
"webrtc",
"file-transfer",
"socket.io",
"express",
"typescript"
],
"author": "mattmcw",
"license": "MIT",
"dependencies": {
"@aws-sdk/client-s3": "^3.777.0",
"@aws-sdk/s3-request-presigner": "^3.777.0",
"bcrypt": "^5.1.1",
"chokidar": "^4.0.3",
"dotenv": "^16.4.7",
"express": "^4.18.2",
"lodash": "^4.17.21",
"socket.io": "^4.7.2",
"socket.io-client": "^4.7.2",
"triple-beam": "^1.4.1",
"winston": "^3.17.0"
},
"devDependencies": {
"@types/express": "^4.17.17",
"@types/node": "^20.5.1",
"typescript": "^5.1.6"
}
}

234
public/js/index.js Normal file
View File

@ -0,0 +1,234 @@
System.register(["socket.io-client"], function (exports_1, context_1) {
"use strict";
var socket_io_client_1, WebRTCFileClient;
var __moduleName = context_1 && context_1.id;
return {
setters: [
function (socket_io_client_1_1) {
socket_io_client_1 = socket_io_client_1_1;
}
],
execute: function () {
WebRTCFileClient = class WebRTCFileClient {
constructor(serverUrl = window.location.origin) {
this.peerConnection = null;
this.dataChannel = null;
this.receivedSize = 0;
this.fileSize = 0;
this.fileName = '';
this.fileChunks = [];
this.onProgressCallback = null;
this.onCompleteCallback = null;
this.onErrorCallback = null;
this.onFilesListCallback = null;
this.socket = socket_io_client_1.io(serverUrl);
this.setupSocketListeners();
}
setupSocketListeners() {
this.socket.on('connect', () => {
console.log('Connected to server, socket ID:', this.socket.id);
});
this.socket.on('offer', (data) => {
console.log('Received offer from server');
this.fileName = data.fileInfo.name;
this.fileSize = data.fileInfo.size;
this.setupPeerConnection();
this.handleOffer(data.sdp);
});
this.socket.on('ice-candidate', (candidate) => {
if (this.peerConnection) {
this.peerConnection.addIceCandidate(new RTCIceCandidate(candidate))
.catch(err => console.error('Error adding received ice candidate', err));
}
});
this.socket.on('error', (errorMessage) => {
console.error('Server error:', errorMessage);
if (this.onErrorCallback) {
this.onErrorCallback(errorMessage);
}
});
this.socket.on('files-list', (files) => {
console.log('Received files list:', files);
if (this.onFilesListCallback) {
this.onFilesListCallback(files);
}
});
this.socket.on('disconnect', () => {
console.log('Disconnected from server');
this.cleanupFileTransfer();
});
}
setupPeerConnection() {
if (this.peerConnection) {
this.peerConnection.close();
}
const configuration = {
iceServers: [
{ urls: 'stun:stun.l.google.com:19302' },
{ urls: 'stun:stun1.l.google.com:19302' }
]
};
this.peerConnection = new RTCPeerConnection(configuration);
this.fileChunks = [];
this.receivedSize = 0;
this.peerConnection.onicecandidate = (event) => {
if (event.candidate) {
this.socket.emit('ice-candidate', event.candidate);
}
};
this.peerConnection.ondatachannel = (event) => {
this.dataChannel = event.channel;
this.setupDataChannel();
};
}
setupDataChannel() {
if (!this.dataChannel)
return;
this.dataChannel.binaryType = 'arraybuffer';
this.dataChannel.onopen = () => {
console.log('Data channel is open');
};
this.dataChannel.onmessage = (event) => {
const data = event.data;
if (typeof data === 'string') {
try {
const message = JSON.parse(data);
if (message.type === 'file-info') {
this.fileName = message.name;
this.fileSize = message.size;
this.fileChunks = [];
this.receivedSize = 0;
console.log(`Receiving file: ${this.fileName}, Size: ${this.formatFileSize(this.fileSize)}`);
}
else if (message.type === 'file-complete') {
this.completeFileTransfer();
}
else if (message.type === 'error') {
if (this.onErrorCallback) {
this.onErrorCallback(message.message);
}
}
}
catch (e) {
console.error('Error parsing message:', e);
}
}
else if (data instanceof ArrayBuffer) {
this.fileChunks.push(new Uint8Array(data));
this.receivedSize += data.byteLength;
if (this.onProgressCallback && this.fileSize > 0) {
const progress = Math.min((this.receivedSize / this.fileSize) * 100, 100);
this.onProgressCallback(progress);
}
}
};
this.dataChannel.onclose = () => {
console.log('Data channel closed');
};
this.dataChannel.onerror = (error) => {
console.error('Data channel error:', error);
if (this.onErrorCallback) {
this.onErrorCallback('Data channel error');
}
};
}
async handleOffer(sdp) {
if (!this.peerConnection)
return;
try {
await this.peerConnection.setRemoteDescription(new RTCSessionDescription(sdp));
const answer = await this.peerConnection.createAnswer();
await this.peerConnection.setLocalDescription(answer);
this.socket.emit('answer', answer);
}
catch (error) {
console.error('Error handling offer:', error);
if (this.onErrorCallback) {
this.onErrorCallback('Failed to establish connection');
}
}
}
completeFileTransfer() {
if (this.fileChunks.length === 0) {
console.error('No file data received');
if (this.onErrorCallback) {
this.onErrorCallback('No file data received');
}
return;
}
let totalLength = 0;
for (const chunk of this.fileChunks) {
totalLength += chunk.length;
}
const completeFile = new Uint8Array(totalLength);
let offset = 0;
for (const chunk of this.fileChunks) {
completeFile.set(chunk, offset);
offset += chunk.length;
}
const fileBlob = new Blob([completeFile]);
console.log(`File transfer complete: ${this.fileName}, Size: ${this.formatFileSize(fileBlob.size)}`);
if (this.onCompleteCallback) {
this.onCompleteCallback(fileBlob, this.fileName);
}
this.cleanupFileTransfer();
}
cleanupFileTransfer() {
this.fileChunks = [];
this.receivedSize = 0;
if (this.dataChannel) {
this.dataChannel.close();
this.dataChannel = null;
}
if (this.peerConnection) {
this.peerConnection.close();
this.peerConnection = null;
}
}
formatFileSize(bytes) {
if (bytes === 0)
return '0 Bytes';
const k = 1024;
const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
const i = Math.floor(Math.log(bytes) / Math.log(k));
return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
}
getFilesList() {
this.socket.emit('get-files');
}
requestFile(fileName) {
this.socket.emit('request-file', fileName);
}
onProgress(callback) {
this.onProgressCallback = callback;
}
onComplete(callback) {
this.onCompleteCallback = callback;
}
onError(callback) {
this.onErrorCallback = callback;
}
onFilesList(callback) {
this.onFilesListCallback = callback;
}
saveFile(blob, fileName) {
const url = URL.createObjectURL(blob);
const a = document.createElement('a');
a.href = url;
a.download = fileName;
document.body.appendChild(a);
a.click();
window.setTimeout(() => {
document.body.removeChild(a);
URL.revokeObjectURL(url);
}, 0);
}
disconnect() {
this.socket.disconnect();
this.cleanupFileTransfer();
}
};
exports_1("WebRTCFileClient", WebRTCFileClient);
}
};
});

24
sql/setup.sql Normal file
View File

@ -0,0 +1,24 @@
-- Files tracked by the transfer service.
-- Fix: CREATE TABLE bodies used braces {} instead of parentheses (), and the
-- second statement was missing its terminating semicolon — invalid SQL as written.
CREATE TABLE IF NOT EXISTS files (
    id TEXT PRIMARY KEY,
    filename TEXT,
    original TEXT,
    archive TEXT,
    paths TEXT,
    hash TEXT,
    size INTEGER,
    created INTEGER,
    created_str TEXT
);

-- Time-limited, optionally-paid access grants to a file.
CREATE TABLE IF NOT EXISTS access (
    id TEXT PRIMARY KEY,
    file TEXT REFERENCES files(id),
    created INTEGER,
    created_str TEXT,
    expires INTEGER,
    expires_str TEXT,
    paid INTEGER DEFAULT 0,
    url TEXT UNIQUE,
    service TEXT,
    downloads INTEGER DEFAULT 0
);

13
src/env/index.ts vendored Normal file
View File

@ -0,0 +1,13 @@
/** Read a string environment variable, returning defaultString when it is unset. */
export function envString (variable : string, defaultString : string) : string {
    const value = process.env[variable];
    return typeof value === 'undefined' ? defaultString : value;
}
/** Read a floating-point environment variable, returning defaultFloat when it is unset. */
export function envFloat (variable : string, defaultFloat : number ) : number {
    const raw = process.env[variable];
    return typeof raw === 'undefined' ? defaultFloat : parseFloat(raw);
}
/**
 * Read an integer environment variable, returning defaultInt when it is unset.
 *
 * Fix: the original called parseInt without a radix, so values such as
 * "0x10" would parse as hexadecimal. An explicit radix of 10 makes env
 * integers always decimal.
 */
export function envInt (variable : string, defaultInt : number ) : number {
    const raw = process.env[variable];
    return typeof raw === 'undefined' ? defaultInt : parseInt(raw, 10);
}
module.exports = { envString, envFloat, envInt };

59
src/hash/index.ts Normal file
View File

@ -0,0 +1,59 @@
import { createHash, Hash } from 'crypto';
import { createReadStream } from 'fs';
import { unlink } from 'fs/promises';
import { exec } from 'child_process';
import { tmp } from '../tmp';
export class Hashes {
    /**
     * Stream the file at `path` and resolve with its sha256 digest as hex.
     * Rejects on any read error.
     */
    public static async file (path : string) : Promise<string> {
        return new Promise((resolve : Function, reject : Function) => {
            const hashSum : Hash = createHash('sha256');
            const stream : any = createReadStream(path);
            stream.on('error', (err : Error) => reject(err));
            stream.on('data', (chunk : Buffer) => hashSum.update(chunk));
            stream.on('end', () => resolve(hashSum.digest('hex')));
        });
    }
    /** Return the sha256 digest of `str` as hex. */
    public static string (str : string) : string {
        const sha : Hash = createHash('sha256').update(str);
        return sha.digest('hex');
    }
    /**
     * Hash a gcode file while stripping the PrusaSlicer banner line, so the
     * same sliced model hashes identically across slicer runs.
     *
     * Fix: the original wrapped each awaited step in
     * `try { ... } catch (err) { throw err; }`, which is a no-op — errors now
     * simply propagate to the caller unchanged.
     *
     * NOTE(review): gcodePath and dest are interpolated into a shell command
     * protected only by double quotes — confirm callers never pass untrusted paths.
     */
    public static async gcode (gcodePath : string) : Promise<string> {
        const dest : string = tmp('.gcode', '3d-gcode-hash-');
        const cmd : string = `cat "${gcodePath}" | grep -v '; generated by PrusaSlicer' > "${dest}"`;
        await Hashes.exec(cmd);
        const hash : string = await Hashes.file(dest);
        await unlink(dest);
        return hash;
    }
    /** Run a shell command; resolves true on success, rejects with the exec error otherwise. */
    private static async exec (cmd : string) {
        return new Promise((resolve : Function, reject : Function) => {
            return exec(cmd, (err : Error, stdout : string | Buffer, stderr : string | Buffer) => {
                if (err) return reject(err);
                return resolve(true);
            });
        });
    }
}
module.exports = { Hashes };

280
src/index.ts Normal file
View File

@ -0,0 +1,280 @@
import express from 'express';
import { envInt } from './env';
import { join } from 'path';

const app = express();
// Use the shared env helper (it was imported but unused) so PORT is always a number.
const PORT = envInt('PORT', 3000);

// Serve static files
app.use(express.static(join(__dirname, 'public')));

// Fix: without a listen() call the process set up the app and exited immediately,
// so `npm start` served nothing.
// NOTE(review): once the commented-out Socket.IO/WebRTC server below is enabled,
// this listen() belongs on its http.Server instead of the bare express app.
app.listen(PORT, () => {
    console.log(`Server running on port ${PORT}`);
});
/*
import http from 'http';
import path from 'path';
import fs from 'fs';
import { Server as SocketIOServer } from 'socket.io';
import { RTCPeerConnection, RTCSessionDescription, RTCIceCandidate } from 'wrtc';
// Define types
interface FileTransferSession {
peerConnection: RTCPeerConnection;
dataChannel?: RTCDataChannel;
fileStream?: fs.ReadStream;
filePath: string;
fileSize: number;
chunkSize: number;
sentBytes: number;
}
// Initialize express app
const app = express();
const server = http.createServer(app);
const io = new SocketIOServer(server);
const PORT = process.env.PORT || 3000;
// Serve static files
app.use(express.static(path.join(__dirname, 'public')));
// Store active file transfer sessions
const sessions: Map<string, FileTransferSession> = new Map();
// Configure WebRTC ICE servers (STUN/TURN)
const iceServers = [
{ urls: 'stun:stun.l.google.com:19302' },
{ urls: 'stun:stun1.l.google.com:19302' }
];
io.on('connection', (socket) => {
console.log('Client connected:', socket.id);
// Handle request for available files
socket.on('get-files', () => {
const filesDirectory = path.join(__dirname, 'files');
try {
const files = fs.readdirSync(filesDirectory)
.filter(file => fs.statSync(path.join(filesDirectory, file)).isFile())
.map(file => {
const filePath = path.join(filesDirectory, file);
const stats = fs.statSync(filePath);
return {
name: file,
size: stats.size,
modified: stats.mtime
};
});
socket.emit('files-list', files);
} catch (err) {
console.error('Error reading files directory:', err);
socket.emit('error', 'Failed to retrieve files list');
}
});
// Handle file transfer request
socket.on('request-file', (fileName: string) => {
const filePath = path.join(__dirname, 'files', fileName);
// Check if file exists
if (!fs.existsSync(filePath)) {
return socket.emit('error', 'File not found');
}
const fileSize = fs.statSync(filePath).size;
const chunkSize = 16384; // 16KB chunks
// Create and configure peer connection
const peerConnection = new RTCPeerConnection({ iceServers });
// Create data channel
const dataChannel = peerConnection.createDataChannel('fileTransfer', {
ordered: true
});
// Store session info
sessions.set(socket.id, {
peerConnection,
dataChannel,
filePath,
fileSize,
chunkSize,
sentBytes: 0
});
// Handle ICE candidates
peerConnection.onicecandidate = (event) => {
if (event.candidate) {
socket.emit('ice-candidate', event.candidate);
}
};
// Set up data channel handlers
dataChannel.onopen = () => {
console.log(`Data channel opened for client ${socket.id}`);
startFileTransfer(socket.id);
};
dataChannel.onclose = () => {
console.log(`Data channel closed for client ${socket.id}`);
cleanupSession(socket.id);
};
dataChannel.onerror = (error) => {
console.error(`Data channel error for client ${socket.id}:`, error);
cleanupSession(socket.id);
};
// Create offer
peerConnection.createOffer()
.then(offer => peerConnection.setLocalDescription(offer))
.then(() => {
socket.emit('offer', {
sdp: peerConnection.localDescription,
fileInfo: {
name: path.basename(filePath),
size: fileSize
}
});
})
.catch(err => {
console.error('Error creating offer:', err);
socket.emit('error', 'Failed to create connection offer');
cleanupSession(socket.id);
});
});
// Handle answer from browser
socket.on('answer', async (answer: RTCSessionDescription) => {
try {
const session = sessions.get(socket.id);
if (!session) return;
await session.peerConnection.setRemoteDescription(new RTCSessionDescription(answer));
console.log(`Connection established with client ${socket.id}`);
} catch (err) {
console.error('Error setting remote description:', err);
socket.emit('error', 'Failed to establish connection');
cleanupSession(socket.id);
}
});
// Handle ICE candidates from browser
socket.on('ice-candidate', async (candidate: RTCIceCandidate) => {
try {
const session = sessions.get(socket.id);
if (!session) return;
await session.peerConnection.addIceCandidate(new RTCIceCandidate(candidate));
} catch (err) {
console.error('Error adding ICE candidate:', err);
}
});
// Handle client disconnection
socket.on('disconnect', () => {
console.log('Client disconnected:', socket.id);
cleanupSession(socket.id);
});
});
// Start file transfer
function startFileTransfer(socketId: string): void {
const session = sessions.get(socketId);
if (!session || !session.dataChannel) return;
// Send file info first
session.dataChannel.send(JSON.stringify({
type: 'file-info',
name: path.basename(session.filePath),
size: session.fileSize
}));
// Open file stream
session.fileStream = fs.createReadStream(session.filePath, {
highWaterMark: session.chunkSize
});
// Process file chunks
session.fileStream.on('data', (chunk: Buffer) => {
// Check if data channel is still open and ready
if (session.dataChannel?.readyState === 'open') {
// Pause the stream to handle backpressure
session.fileStream?.pause();
// Send chunk as ArrayBuffer
session.dataChannel.send(chunk);
session.sentBytes += chunk.length;
// Report progress
if (session.sentBytes % (5 * 1024 * 1024) === 0) { // Every 5MB
console.log(`Sent ${session.sentBytes / (1024 * 1024)}MB of ${session.fileSize / (1024 * 1024)}MB`);
}
// Check buffer status before resuming
const bufferAmount = session.dataChannel.bufferedAmount;
if (bufferAmount < session.chunkSize * 2) {
// Resume reading if buffer is below threshold
session.fileStream?.resume();
} else {
// Wait for buffer to drain
const checkBuffer = setInterval(() => {
if (session.dataChannel?.bufferedAmount < session.chunkSize) {
clearInterval(checkBuffer);
session.fileStream?.resume();
}
}, 100);
}
}
});
// Handle end of file
session.fileStream.on('end', () => {
if (session.dataChannel?.readyState === 'open') {
session.dataChannel.send(JSON.stringify({ type: 'file-complete' }));
console.log(`File transfer complete for client ${socketId}`);
}
});
// Handle file stream errors
session.fileStream.on('error', (err) => {
console.error(`File stream error for client ${socketId}:`, err);
if (session.dataChannel?.readyState === 'open') {
session.dataChannel.send(JSON.stringify({
type: 'error',
message: 'File read error on server'
}));
}
cleanupSession(socketId);
});
}
// Clean up session resources
function cleanupSession(socketId: string): void {
const session = sessions.get(socketId);
if (!session) return;
if (session.fileStream) {
session.fileStream.destroy();
}
if (session.dataChannel && session.dataChannel.readyState === 'open') {
session.dataChannel.close();
}
session.peerConnection.close();
sessions.delete(socketId);
console.log(`Cleaned up session for client ${socketId}`);
}
// Start the server
server.listen(PORT, () => {
console.log(`Server running on port ${PORT}`);
// Create files directory if it doesn't exist
const filesDir = path.join(__dirname, 'files');
if (!fs.existsSync(filesDir)) {
fs.mkdirSync(filesDir);
console.log('Created files directory');
}
});
*/

61
src/log/index.ts Normal file
View File

@ -0,0 +1,61 @@
'use strict'
/** @module log */
/** Wrapper for winston that tags streams and optionally writes files with a simple interface. */
/** Module now also supports optional papertrail integration, other services to follow */
import { format, transports, createLogger } from 'winston';
const { SPLAT } = require('triple-beam');
const { isObject } = require('lodash');
const APP_NAME : string = process.env.APP_NAME || 'default';
// Render objects as JSON so they are legible in a log line; pass primitives through untouched.
function formatObject (param : any) {
    if (!isObject(param)) {
        return param;
    }
    return JSON.stringify(param);
}
// Winston format step: merge the primary message with any splat (rest) arguments
// into one string, stringifying objects along the way. Mutates info.message in
// place, as winston format transforms conventionally do.
const all = format((info : any) => {
    const splat = info[SPLAT] || [];
    const message = formatObject(info.message);
    const rest = splat.map(formatObject).join(' ');
    info.message = `${message} ${rest}`;
    return info;
});
// Final line layout: "<timestamp> [<label>] <level>: <message>".
const myFormat = format.printf(({ level, message, label, timestamp } : any) => {
    return `${timestamp} [${label}] ${level}: ${message}`;
});
/**
 * Build a winston logger tagged with `label`, writing to the console and,
 * when `filename` is given, to that file as well.
 *
 * @param label Label appearing on every log line
 * @param filename Optional file path to also write the log to
 *
 * @returns Configured winston logger
 */
export function createLog (label : string, filename : string | null = null) {
    const tports : any[] = [ new (transports.Console)() ];
    const fmat : any = format.combine(
        all(),
        format.label({ label }),
        format.timestamp({format: 'YYYY-MM-DD HH:mm:ss.SSS'}),
        format.colorize(),
        myFormat,
    );
    // Fix: removed the unused `papertrailOpts` declaration left over from the
    // papertrail integration mentioned in the module header.
    if (filename !== null) {
        tports.push( new (transports.File)({ filename }) );
    }
    return createLogger({
        format : fmat,
        transports : tports
    });
}
module.exports = { createLog };

78
src/monitor/index.ts Normal file
View File

@ -0,0 +1,78 @@
import 'dotenv/config';
import { upload } from '../upload';
import { createLog } from '../log';
import chokidar from 'chokidar';
import type { Logger } from 'winston';
import type { UploadResult, S3Config } from '../upload';
const EXPIRATION : number = 3600; //1 hour
const log : Logger = createLog('files');
/**
 * Upload a watched file to S3 and log the resulting pre-signed URL.
 *
 * NOTE(review): region and bucketName are hard-coded placeholders — confirm they
 * should come from the environment before this path goes live.
 */
async function processUpload (filePath : string) {
    const config : S3Config = {
        region: 'us-east-1',
        bucketName: 'your-bucket-name',
        expirationSeconds: EXPIRATION
    };
    log.info(`Started upload: ${filePath}`);
    const result : UploadResult = await upload('test', filePath, config);
    if (result.success) {
        // Fix: this message used single quotes, so "${filePath}" was logged
        // literally instead of being interpolated.
        log.info(`File ${filePath} uploaded successfully!`);
        log.info('Private URL:', result.url);
    } else {
        log.error('Upload failed:', result.error);
    }
}
// Watch the ./watch directory (ignoring dotfiles) and log every filesystem event.
// NOTE(review): processUpload() is never invoked here, so nothing is actually
// uploaded yet — presumably 'add' events should trigger it; confirm intent.
async function main() {
    chokidar.watch('./watch', { ignored: /(^|[/\\])\../ }).on('all', (event : any, path : string) => {
        log.info(`File ${path} changed with event type ${event}`);
    });
}
main().catch(log.error);
/*
const minioResult = await uploadFileToS3('/path/to/your/file.pdf', {
region: 'us-east-1', // Region can be any string for MinIO
endpoint: 'https://minio.your-domain.com',
bucketName: 'your-minio-bucket',
credentials: {
accessKeyId: 'your-minio-access-key',
secretAccessKey: 'your-minio-secret-key'
},
forcePathStyle: true, // Important for most S3-compatible services
expirationSeconds: 3600
});
// Example 3: DigitalOcean Spaces
const spacesResult = await uploadFileToS3('/path/to/your/file.pdf', {
region: 'nyc3', // DigitalOcean datacenter region
endpoint: 'https://nyc3.digitaloceanspaces.com',
bucketName: 'your-space-name',
credentials: {
accessKeyId: 'your-spaces-key',
secretAccessKey: 'your-spaces-secret'
},
forcePathStyle: true,
expirationSeconds: 7200 // 2 hours
});
// Example 4: Wasabi
const wasabiResult = await uploadFileToS3('/path/to/your/file.pdf', {
region: 'us-east-1',
endpoint: 'https://s3.wasabisys.com',
bucketName: 'your-wasabi-bucket',
credentials: {
accessKeyId: 'your-wasabi-access-key',
secretAccessKey: 'your-wasabi-secret-key'
},
forcePathStyle: true,
expirationSeconds: 86400 // 24 hours
});
*/

9
src/size/index.ts Normal file
View File

@ -0,0 +1,9 @@
import { stat } from 'fs/promises';
import type { Stats } from 'fs';
/** Resolve to the size, in bytes, of the file at filePath. */
export async function size (filePath : string) : Promise<number> {
    const info : Stats = await stat(filePath);
    return info.size;
}
module.exports = { size };

108
src/upload/index.ts Normal file
View File

@ -0,0 +1,108 @@
import { S3Client, PutObjectCommand, GetObjectCommand } from '@aws-sdk/client-s3';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
import { ReadStream, createReadStream } from 'fs';
import { resolve, basename, extname } from 'path';
// S3 configuration interface
interface S3Config {
    region: string;
    bucketName: string;
    // Lifetime of the returned pre-signed GET URL, in seconds.
    expirationSeconds: number;
    // Custom endpoint for S3-compatible services (MinIO, Spaces, Wasabi, ...).
    endpoint?: string;
    // Explicit credentials; when omitted the SDK's default provider chain is used.
    credentials?: {
        accessKeyId: string;
        secretAccessKey: string;
    };
    // Path-style addressing; most S3-compatible services require true.
    forcePathStyle?: boolean;
}
// Function response interface
interface UploadResult {
    success: boolean;
    // Pre-signed, time-limited GET URL (present on success).
    url?: string;
    // Object key the file was stored under (present on success).
    key?: string;
    // Human-readable failure description (present on failure).
    error?: string;
}
/**
 * Uploads a file to S3 and returns a private, time-limited URL to access it
 * @param id - Identifier used (with the file's extension) as the object key
 * @param filePath - Absolute or relative path to the file to upload
 * @param config - S3 configuration parameters
 * @returns Promise containing the result with URL if successful
 */
export async function upload (id: string, filePath: string, config: S3Config): Promise<UploadResult> {
    const fullPath : string = resolve(filePath);
    const extName : string = extname(filePath);
    // Fix: removed the unused `fileName` local (basename was computed but never used).
    const key: string = `${id}${extName}`;
    if (!key) {
        return {
            success: false,
            error: 'Could not create key'
        };
    }
    let fileStream : ReadStream;
    try {
        fileStream = createReadStream(fullPath);
    } catch (err) {
        // NOTE(review): createReadStream reports missing/unreadable files
        // asynchronously via the stream's 'error' event, not by throwing here,
        // so such failures actually surface from s3Client.send() below —
        // confirm whether an explicit up-front existence check is wanted.
        const error: Error = err as Error;
        return {
            success: false,
            error: `Error reading file: ${error.message}`
        };
    }
    // Build client options, only overriding SDK defaults the caller supplied.
    const s3ClientOptions: Record<string, any> = {
        region: config.region
    };
    if (config.endpoint) {
        s3ClientOptions.endpoint = config.endpoint;
    }
    if (config.credentials) {
        s3ClientOptions.credentials = config.credentials;
    }
    if (config.forcePathStyle !== undefined) {
        s3ClientOptions.forcePathStyle = config.forcePathStyle;
    }
    const s3Client: S3Client = new S3Client(s3ClientOptions);
    try {
        // Upload the object under the derived key.
        const uploadCommand: PutObjectCommand = new PutObjectCommand({
            Bucket: config.bucketName,
            Key: key,
            Body: fileStream
        });
        await s3Client.send(uploadCommand);
        // Mint a pre-signed GET URL so the object stays private but reachable.
        const getCommand: GetObjectCommand = new GetObjectCommand({
            Bucket: config.bucketName,
            Key: key
        });
        const url: string = await getSignedUrl(s3Client, getCommand, {
            expiresIn: config.expirationSeconds
        });
        return {
            success: true,
            url,
            key
        };
    } catch (err) {
        const error : Error = err as Error;
        return {
            success: false,
            error: `Error uploading to S3: ${error.message}`
        };
    }
}
// NOTE(review): reassigning module.exports next to the ES `export` statements above
// drops tsc's __esModule marker — confirm the dual CJS/ESM export is intentional.
module.exports = { upload };
export type { UploadResult, S3Config };

14
tsconfig.browser.json Normal file
View File

@ -0,0 +1,14 @@
{
"compilerOptions": {
"baseUrl": "./client/",
"outDir": "./public/js/",
"target": "es2020",
"module": "system",
"removeComments": true,
"moduleResolution": "node",
"lib": ["dom", "es2020"]
},
"include": [
"./client/*"
]
}

16
tsconfig.json Normal file
View File

@ -0,0 +1,16 @@
{
"compilerOptions": {
"target": "ES2020",
"module": "commonjs",
"moduleResolution": "node",
"esModuleInterop": true,
"strict": true,
"outDir": "./dist",
"sourceMap": true,
"declaration": true,
"forceConsistentCasingInFileNames": true,
"lib": ["DOM", "ES2020"]
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist"]
}

217
views/index.html Normal file
View File

@ -0,0 +1,217 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>WebRTC File Transfer</title>
<style>
body {
font-family: Arial, sans-serif;
max-width: 800px;
margin: 0 auto;
padding: 20px;
}
h1 {
color: #333;
}
.file-list {
border: 1px solid #ddd;
border-radius: 4px;
margin-bottom: 20px;
max-height: 300px;
overflow-y: auto;
}
.file-item {
padding: 10px;
border-bottom: 1px solid #eee;
cursor: pointer;
display: flex;
justify-content: space-between;
}
.file-item:hover {
background-color: #f5f5f5;
}
.file-item:last-child {
border-bottom: none;
}
.progress-container {
margin: 20px 0;
display: none;
}
.progress-bar {
width: 100%;
background-color: #f0f0f0;
border-radius: 4px;
overflow: hidden;
}
.progress {
height: 20px;
background-color: #4CAF50;
width: 0%;
transition: width 0.3s ease;
}
.progress-text {
margin-top: 5px;
font-size: 14px;
color: #666;
}
.action-buttons {
margin: 20px 0;
}
button {
padding: 8px 16px;
background-color: #4CAF50;
color: white;
border: none;
border-radius: 4px;
cursor: pointer;
margin-right: 10px;
}
button:hover {
background-color: #45a049;
}
button:disabled {
background-color: #cccccc;
cursor: not-allowed;
}
.status {
margin-top: 10px;
padding: 10px;
border-radius: 4px;
}
.success {
background-color: #e8f5e9;
color: #2e7d32;
}
.error {
background-color: #ffebee;
color: #c62828;
}
</style>
</head>
<body>
<h1>WebRTC File Transfer</h1>
<div class="action-buttons">
<button id="refreshBtn">Refresh File List</button>
</div>
<h2>Available Files</h2>
<div class="file-list" id="fileList">
<div class="file-item">Loading files...</div>
</div>
<div class="progress-container" id="progressContainer">
<h3 id="currentFileName">Downloading file...</h3>
<div class="progress-bar">
<div class="progress" id="progressBar"></div>
</div>
<div class="progress-text" id="progressText">0%</div>
</div>
<div id="statusContainer"></div>
<script src="/socket.io/socket.io.js"></script>
<script src="/js/index.js"></script>
<script>
        // NOTE(review): /js/index.js is compiled as a SystemJS module (tsconfig.browser.json
        // sets "module": "system"), so loading it with a plain <script> tag does not define
        // WebRTCFileClient as a global for the code below — confirm how the client bundle
        // is meant to be loaded (SystemJS loader, or switch the browser build to IIFE/ESM).
document.addEventListener('DOMContentLoaded', () => {
// Elements
const fileListEl = document.getElementById('fileList');
const progressContainer = document.getElementById('progressContainer');
const progressBar = document.getElementById('progressBar');
const progressText = document.getElementById('progressText');
const currentFileName = document.getElementById('currentFileName');
const refreshBtn = document.getElementById('refreshBtn');
const statusContainer = document.getElementById('statusContainer');
// Create client instance
const client = new WebRTCFileClient();
// Set up event handlers
client.onProgress((progress) => {
progressContainer.style.display = 'block';
progressBar.style.width = `${progress}%`;
progressText.innerText = `${Math.round(progress)}%`;
});
client.onComplete((file, fileName) => {
showStatus('success', `File "${fileName}" downloaded successfully!`);
client.saveFile(file, fileName);
progressContainer.style.display = 'none';
});
client.onError((error) => {
showStatus('error', `Error: ${error}`);
progressContainer.style.display = 'none';
});
client.onFilesList((files) => {
if (files.length === 0) {
fileListEl.innerHTML = '<div class="file-item">No files available</div>';
return;
}
fileListEl.innerHTML = '';
files.forEach(file => {
const fileItem = document.createElement('div');
fileItem.className = 'file-item';
// Create file info elements
const nameEl = document.createElement('span');
nameEl.textContent = file.name;
const sizeEl = document.createElement('span');
sizeEl.textContent = formatFileSize(file.size);
fileItem.appendChild(nameEl);
fileItem.appendChild(sizeEl);
fileItem.addEventListener('click', () => {
currentFileName.textContent = `Downloading ${file.name}`;
progressContainer.style.display = 'block';
progressBar.style.width = '0%';
progressText.innerText = '0%';
client.requestFile(file.name);
});
fileListEl.appendChild(fileItem);
});
});
// Get initial file list
client.getFilesList();
// Set up refresh button
refreshBtn.addEventListener('click', () => {
fileListEl.innerHTML = '<div class="file-item">Loading files...</div>';
client.getFilesList();
});
// Helper function to show status messages
function showStatus(type, message) {
const statusEl = document.createElement('div');
statusEl.className = `status ${type}`;
statusEl.textContent = message;
statusContainer.innerHTML = '';
statusContainer.appendChild(statusEl);
// Clear the status after 5 seconds
setTimeout(() => {
statusEl.remove();
}, 5000);
}
// Helper function to format file size
function formatFileSize(bytes) {
if (bytes === 0) return '0 Bytes';
const k = 1024;
const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
const i = Math.floor(Math.log(bytes) / Math.log(k));
return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
}
});
</script>
</body>
</html>