"use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); require("dotenv/config"); const express_1 = __importDefault(require("express")); const promises_1 = __importDefault(require("fs/promises")); const fs_1 = require("fs"); const os_1 = require("os"); const path_1 = require("path"); const crypto_1 = require("crypto"); const sqlite3_1 = require("sqlite3"); const body_parser_1 = __importDefault(require("body-parser")); const multer_1 = __importDefault(require("multer")); const uuid_1 = require("uuid"); const mime_1 = require("mime"); const log_1 = require("./log"); const mail_1 = require("./mail"); const Handlebars = __importStar(require("handlebars")); const yoloWebUrl = typeof process.env['YOLO_WEB_URL'] !== 'undefined' ? process.env['YOLO_WEB_URL'] : 'http://localhost:3333'; const port = typeof process.env['PORT'] !== 'undefined' ? 
parseInt(process.env['PORT'], 10) : 3333; const data = typeof process.env['DATADIR'] !== 'undefined' ? process.env['DATADIR'] : './data'; const dbPath = (0, path_1.join)(data, 'queue.sqlite'); const app = (0, express_1.default)(); const tmp = (0, os_1.tmpdir)(); const db = new sqlite3_1.Database(dbPath); let log = (0, log_1.createLog)('server'); const accepted = [ 'application/octet-stream', 'multipart/x-zip', 'application/zip', 'application/zip-compressed', 'application/x-zip-compressed', ]; const storage = multer_1.default.diskStorage({ destination: function (req, file, cb) { cb(null, tmp); }, filename: function (req, file, cb) { cb(null, `${+new Date()}_${file.originalname}`); } }); function fileFilter(req, file, cb) { if (accepted.indexOf(file.mimetype) !== -1) { cb(null, true); } else { log.warn(`Filetype "${file.mimetype}" is not of type zip`); cb(new Error("Dataset is not of type zip"), false); } } const uploadZip = (0, multer_1.default)({ storage, fileFilter }); const uploadOnnx = (0, multer_1.default)({ storage, fileFilter }); app.use(body_parser_1.default.json()); app.use(body_parser_1.default.urlencoded({ extended: true })); async function createTemplate(filePath) { let tmpl; try { tmpl = await promises_1.default.readFile(filePath, 'utf8'); } catch (err) { log.error(err); return null; } return Handlebars.compile(tmpl); } let index; function hash(path) { return new Promise((resolve, reject) => { const hashSum = (0, crypto_1.createHash)('sha256'); const stream = (0, fs_1.createReadStream)(path); stream.on('error', (err) => reject(err)); stream.on('data', (chunk) => hashSum.update(chunk)); stream.on('end', () => resolve(hashSum.digest('hex'))); }); } async function exists(path) { try { await promises_1.default.access(path); return true; } catch { return false; } } async function add(email, name, dataset, model) { const query = `INSERT INTO queue (id, email, name, dataset, model) VALUES ( ?, ?, ?, ?, ?);`; const id = (0, uuid_1.v4)(); return new 
Promise((resolve, reject) => { return db.run(query, [id, email, name, dataset, model], (err, row) => { if (err) return reject(err); log.info(`Added job ${id} to queue`); return resolve(id); }); }); } async function status(id) { const query = `SELECT name, model, started, completed, failed, meta FROM queue WHERE id = ? LIMIT 1;`; let jobStatus = 'Unknown'; return new Promise((resolve, reject) => { return db.all(query, [id], (err, rows) => { if (err) return reject(err); if (rows.length !== 1) { return resolve(jobStatus); } const obj = rows[0]; if (rows[0].started === null) { jobStatus = `Has not started`; } else if (rows[0].failed !== null) { jobStatus = `Failed
${rows[0].meta}
`; } else if (rows[0].completed !== null) { jobStatus = `Completed ${rows[0].completed} Download`; } else if (rows[0].started !== null) { jobStatus = `Started ${rows[0].started}`; } log.info(`Got status for job ${id}: ${jobStatus}`); return resolve(jobStatus); }); }); } async function name(id) { const query = `SELECT name, meta FROM queue WHERE id = ? LIMIT 1;`; return new Promise((resolve, reject) => { return db.all(query, [id], (err, rows) => { if (err) return reject(err); if (rows.length < 1) { return reject(new Error(`Job ${id} does not exist`)); } return resolve(rows[0].name); }); }); } async function dataset(id) { const query = `SELECT dataset FROM queue WHERE id = ? LIMIT 1;`; return new Promise((resolve, reject) => { return db.all(query, [id], (err, rows) => { if (err) return reject(err); if (rows.length < 1) { return reject(new Error(`Dataset ${id} does not exist`)); } return resolve(rows[0].dataset); }); }); } async function job() { const query = `SELECT id FROM queue WHERE started IS NULL AND completed IS NULL AND failed IS NULL ORDER BY created ASC LIMIT 1;`; return new Promise((resolve, reject) => { return db.all(query, [], (err, rows) => { if (err) return reject(err); if (rows.length < 1) { return resolve([]); } return resolve([rows[0].id]); }); }); } async function jobs() { const query = `SELECT id FROM queue WHERE started IS NULL AND completed IS NULL AND failed IS NULL ORDER BY created ASC;`; return new Promise((resolve, reject) => { return db.all(query, [], (err, rows) => { if (err) return reject(err); return resolve(rows.map((el) => el.id)); }); }); } async function claim(id) { const query = `SELECT * FROM queue WHERE id = ? 
LIMIT 1;`; return new Promise((resolve, reject) => { return db.all(query, [id], (err, rows) => { if (err) return reject(err); if (rows.length < 1) { return reject(new Error(`Dataset ${id} does not exist`)); } if (rows[0].started !== null) { return reject(new Error(`Job ${id} is already claimed`)); } const claimQuery = `UPDATE queue SET started = CURRENT_TIMESTAMP WHERE id = ?;`; return db.run(claimQuery, [id], (err, row) => { if (err) return reject(err); return resolve(rows[0]); }); }); }); } async function get(id) { const query = `SELECT * FROM queue WHERE id = ? LIMIT 1;`; return new Promise((resolve, reject) => { return db.all(query, [id], (err, rows) => { if (err) return reject(err); if (rows.length < 1) { return reject(new Error(`Job ${id} does not exist`)); } return resolve(rows[0]); }); }); } async function fail(id, meta) { const query = `UPDATE queue SET failed = CURRENT_TIMESTAMP, meta = ? WHERE id = ?;`; return new Promise((resolve, reject) => { return db.run(query, [meta, id], (err, row) => { if (err) return reject(err); return resolve(true); }); }); } async function complete(id, meta) { const query = `UPDATE queue SET completed = CURRENT_TIMESTAMP, meta = ? WHERE id = ?;`; return new Promise((resolve, reject) => { return db.run(query, [meta, id], (err, row) => { if (err) return reject(err); return resolve(true); }); }); } async function all() { const query = `SELECT * FROM queue ORDER BY created DESC;`; return new Promise((resolve, reject) => { return db.all(query, [], (err, rows) => { if (err) return reject(err); return resolve(rows); }); }); } function annotate(row) { row.hasModel = row.completed != null; if (row.completed != null) { row.status = 'Completed'; } else if (row.failed != null) { row.status = 'Failed'; } else if (row.started != null) { row.status = 'Training'; } else { row.status = 'Waiting'; } return row; } async function alertClaimed(id, model, name, email) { const subject = `Training ${name} started`; const body = `

Your ${model} training job "${name}" has started!


Status is available here: ${yoloWebUrl}/job/${id}

You will receive an email when the training is complete.
`; try { await (0, mail_1.sendMail)(email, subject, body); } catch (err) { log.error('Error sending mail'); log.error(err); } } async function alertFailed(id, model, name, email) { const subject = `Training ${name} failed`; const body = `

Your ${model} training job "${name}" has failed :(


Additional information is available here: ${yoloWebUrl}/job/${id}

Please contact the administrator for more information
`; try { await (0, mail_1.sendMail)(email, subject, body); } catch (err) { log.error('Error sending mail'); log.error(err); } } async function alertCompleted(id, model, name, email) { const subject = `Training ${name} completed`; const body = `

Your ${model} training job "${name}" has completed!


The model is available for download here: ${yoloWebUrl}/model/${id}
`; try { await (0, mail_1.sendMail)(email, subject, body); } catch (err) { log.error('Error sending mail'); log.error(err); } } app.get('/', async (req, res, next) => { let html; let rows; let input; try { rows = await all(); } catch (err) { log.error(err); return next('ERROR: Could not retrieve jobs from queue'); } rows = rows.map(annotate); input = { rows, hasTable: typeof rows == 'undefined' ? false : rows.length > 0 }; html = index(input); res.send(html); }); app.post('/', uploadZip.single('dataset'), async (req, res, next) => { let fileHash; let filePath; let fileExists; let id; req.setTimeout(0); if (typeof req.file === 'undefined' || req.file === null) { log.error('No file in upload'); return next('ERROR: Please upload dataset as zip file'); } try { fileHash = await hash(req.file.path); } catch (err) { log.error(err); return next(`Error hashing file ${req.file.originalname}`); } filePath = (0, path_1.join)(data, `${fileHash}.zip`); try { fileExists = await exists(filePath); } catch (err) { log.error(err); } if (!fileExists) { try { await promises_1.default.copyFile(req.file.path, filePath); log.info(`Saved dataset with hash ${fileHash}`); } catch (err) { log.error(err); return next(err); } } else { log.warn(`Dataset with hash ${fileHash} already exists...`); } try { await promises_1.default.unlink(req.file.path); } catch (err) { log.error(err); } try { id = await add(req.body.email, req.body.name, fileHash, req.body.model); } catch (err) { log.info(err); return next(`Error adding training job ${req.body.name}`); } res.send(`Dataset for job ${req.body.name} has been uploaded successfully. You will be emailed when your job has started and when it has completed training.
Monitor job status here: ${id}`); }); app.post('/job/:id', uploadOnnx.single('model'), async (req, res, next) => { let filePath; let meta = null; let id; let jobObj; req.setTimeout(0); if (typeof req.file === 'undefined' || req.file === null) { log.error('No file in upload'); return next('ERROR: Please model as zip file'); } id = req.params.id; filePath = (0, path_1.join)(data, `${id}.zip`); if (typeof req.body.meta !== 'undefined') { meta = req.body.meta; } try { await promises_1.default.copyFile(req.file.path, filePath); log.info(`Saved model for job ${id}`); } catch (err) { log.error(err); return next(err); } try { await promises_1.default.unlink(req.file.path); } catch (err) { log.error(err); } try { await complete(id, meta); } catch (err) { log.error(err); return next(`Error completing training job ${id}`); } try { jobObj = await get(id); } catch (err) { log.error('Error getting job for alertCompleted'); log.error(err); } try { await alertCompleted(id, jobObj.model, jobObj.name, jobObj.email); } catch (err) { log.error('Error sending alertCompleted email'); } res.json({ id }); }); app.get('/job/:id', async (req, res, next) => { let jobStatus; if (typeof req.params.id === 'undefined' || req.params.id === null) { log.error(`No job id provided`); return next('Invalid request'); } if (req.params.id.length !== 36) { log.error(`Job id ${req.params.id} is invalid`); return next('Invalid job id'); } try { jobStatus = await status(req.params.id); } catch (err) { log.error(err); return next('Error getting job status'); } return res.send(`Job: ${req.params.id}
Status: ${jobStatus}`); }); app.get('/model/:id', async (req, res, next) => { let filePath; let fileExists = false; let id; let fileName; let fileStream; let mimeType; let stream; if (typeof req.params.id === 'undefined' || req.params.id === null) { log.error(`No job id provided`); return next('Invalid request'); } id = req.params.id; filePath = (0, path_1.join)(data, `${id}.zip`); try { fileExists = await exists(filePath); } catch (err) { log.error(err); return next(`Error checking whether model for job ${id} exists`); } if (!fileExists) { log.warn(`Model for job ${id} does not exist`); return next(`Model for job ${id} does not exist`); } try { fileName = await name(id); } catch (err) { log.error(err); return next(`Error getting job ${id}`); } mimeType = (0, mime_1.getType)(filePath); res.setHeader('Content-disposition', `attachment; filename=${fileName}.zip`); res.setHeader('Content-type', mimeType); stream = (0, fs_1.createReadStream)(filePath); stream.pipe(res); }); app.get('/dataset/:id', async (req, res, next) => { let filePath; let fileExists = false; let id; let datasetHash; let fileStream; let mimeType; let stream; if (typeof req.params.id === 'undefined' || req.params.id === null) { log.error(`No dataset id provided`); return next('Invalid request'); } id = req.params.id; try { datasetHash = await dataset(id); } catch (err) { log.error(err); return next(`Error getting dataset for job ${id}`); } filePath = (0, path_1.join)(data, `${datasetHash}.zip`); try { fileExists = await exists(filePath); } catch (err) { log.error(err); return next(`Error checking whether dataset for job ${id} exists`); } if (!fileExists) { log.warn(`Dataset for job ${id} does not exist`); return next(`Dataset for job ${id} does not exist`); } mimeType = (0, mime_1.getType)(filePath); res.setHeader('Content-disposition', `attachment; filename=${datasetHash}.zip`); res.setHeader('Content-type', mimeType); stream = (0, fs_1.createReadStream)(filePath); stream.pipe(res); }); 
// ---------------------------------------------------------------------------
// Worker-facing queue API + process startup.
// ---------------------------------------------------------------------------

// GET /job — id of the oldest unclaimed job, as a 0/1-element JSON array.
app.get('/job', async (req, res, next) => {
    let jobArr;
    try {
        jobArr = await job();
    } catch (err) {
        log.error(err);
        return next('Error getting job');
    }
    res.json(jobArr);
});

// GET /jobs — ids of all unclaimed jobs, oldest first, as a JSON array.
app.get('/jobs', async (req, res, next) => {
    let jobArr;
    try {
        jobArr = await jobs();
    } catch (err) {
        log.error(err);
        // FIX: message previously read 'Error getting job' (copy-pasted from
        // the GET /job handler above).
        return next('Error getting jobs');
    }
    res.json(jobArr);
});

// POST /job/claim/:id — a worker claims a job: marks it started and returns
// the info the worker needs (dataset download path, model type, submitter).
app.post('/job/claim/:id', async (req, res, next) => {
    if (typeof req.params.id === 'undefined' || req.params.id === null) {
        log.error(`No dataset id provided`);
        return next('Invalid request');
    }
    const id = req.params.id;
    let jobObj;
    try {
        jobObj = await claim(id);
        log.info(`Job ${id} was claimed`);
    } catch (err) {
        log.error(err);
        return next('Error claiming job');
    }
    // Email notification is best-effort: a mail failure must not undo the
    // claim or fail the request.
    try {
        await alertClaimed(id, jobObj.model, jobObj.name, jobObj.email);
    } catch (err) {
        log.error(err);
    }
    res.json({
        id,
        path: `/dataset/${id}`,
        dataset: jobObj.dataset,
        model: jobObj.model,
        name: jobObj.name,
        email: jobObj.email,
    });
});

// POST /job/fail/:id — a worker reports a job failure, optionally with a
// `meta` body field describing the error; the submitter is emailed.
app.post('/job/fail/:id', async (req, res, next) => {
    if (typeof req.params.id === 'undefined' || req.params.id === null) {
        log.error(`No dataset id provided`);
        return next('Invalid request');
    }
    const id = req.params.id;
    let meta = null;
    if (typeof req.body.meta !== 'undefined') {
        meta = req.body.meta;
    }
    try {
        await fail(id, meta);
        log.info(`Job ${id} failed`);
    } catch (err) {
        log.error(err);
        return next('Error failing job');
    }
    let jobObj;
    try {
        jobObj = await get(id);
    } catch (err) {
        log.error('Error getting job for alertFailed');
        log.error(err);
    }
    // FIX: only attempt the alert when the lookup succeeded. Previously an
    // undefined jobObj threw a TypeError on `jobObj.model` that was swallowed
    // and mislogged as a mail-sending error.
    if (jobObj) {
        try {
            await alertFailed(id, jobObj.model, jobObj.name, jobObj.email);
        } catch (err) {
            log.error('Error sending alertFailed email');
        }
    }
    res.json(true);
});

// Start the server. The index template is compiled once here; createTemplate
// returns null on a read failure, in which case GET / will throw on render.
app.listen(port, async () => {
    index = await createTemplate('./views/index.hbs');
    log.info(`yolo_web running on port ${port}`);
});
//# sourceMappingURL=index.js.map