"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
require("dotenv/config");
const express_1 = __importDefault(require("express"));
const promises_1 = __importDefault(require("fs/promises"));
const fs_1 = require("fs");
const os_1 = require("os");
const path_1 = require("path");
const crypto_1 = require("crypto");
const sqlite3_1 = require("sqlite3");
const body_parser_1 = __importDefault(require("body-parser"));
const multer_1 = __importDefault(require("multer"));
const uuid_1 = require("uuid");
const mime_1 = require("mime");
const log_1 = require("./log");
const Handlebars = __importStar(require("handlebars"));
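// Runtime configuration: PORT and DATADIR are read from the environment
// (loaded via dotenv), falling back to 3333 and ./data respectively.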
const port = typeof process.env['PORT'] !== 'undefined' ? parseInt(process.env['PORT'], 10) : 3333;
const data = typeof process.env['DATADIR'] !== 'undefined' ? process.env['DATADIR'] : './data';
const dbPath = (0, path_1.join)(data, 'queue.sqlite');
const app = (0, express_1.default)();
const tmp = (0, os_1.tmpdir)();
const db = new sqlite3_1.Database(dbPath);
let log = (0, log_1.createLog)('server');
const accepted = ['application/zip', 'application/x-zip-compressed'];
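// Uploads are written to the OS temp directory with a timestamp-prefixed
// filename to avoid collisions between files with the same original name.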
const storage = multer_1.default.diskStorage({
destination: function (req, file, cb) {
cb(null, tmp);
},
filename: function (req, file, cb) {
cb(null, `${+new Date()}_${file.originalname}`);
}
});
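// Accept only uploads whose MIME type is in the zip whitelist above;
// anything else is rejected and the error is surfaced to the client.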
function fileFilter(req, file, cb) {
if (accepted.indexOf(file.mimetype) !== -1) {
cb(null, true);
}
else {
log.warn(`Filetype ${file.mimetype} is not of type zip`);
cb(new Error("Dataset is not of type zip"), false);
}
}
const uploadZip = (0, multer_1.default)({ storage, fileFilter });
const uploadOnnx = (0, multer_1.default)({ storage, fileFilter });
app.use(body_parser_1.default.json());
app.use(body_parser_1.default.urlencoded({ extended: true }));
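// Reads a Handlebars template from disk and returns the compiled template
// function, or null if the file could not be read.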
async function createTemplate(filePath) {
let tmpl;
try {
tmpl = await promises_1.default.readFile(filePath, 'utf8');
}
catch (err) {
log.error(err);
return null;
}
return Handlebars.compile(tmpl);
}
let index;
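// Streams a file through SHA-256 and resolves with the hex digest. The hash
// doubles as the stored dataset filename, so identical uploads are deduplicated.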
function hash(path) {
return new Promise((resolve, reject) => {
const hashSum = (0, crypto_1.createHash)('sha256');
const stream = (0, fs_1.createReadStream)(path);
stream.on('error', (err) => reject(err));
stream.on('data', (chunk) => hashSum.update(chunk));
stream.on('end', () => resolve(hashSum.digest('hex')));
});
}
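// Returns true if the given path is accessible, false otherwise.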
async function exists(path) {
try {
await promises_1.default.access(path);
return true;
}
catch {
return false;
}
}
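// Inserts a new training job into the queue table and resolves with its generated UUID.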
async function add(email, name, dataset, model) {
const query = `INSERT INTO queue
(id, email, name, dataset, model)
VALUES ( ?, ?, ?, ?, ?);`;
const id = (0, uuid_1.v4)();
return new Promise((resolve, reject) => {
return db.run(query, [id, email, name, dataset, model], (err, row) => {
if (err)
return reject(err);
log.info(`Added job ${id} to queue`);
return resolve(id);
});
});
}
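// Resolves with a human-readable status string for the given job id.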
async function status(id) {
const query = `SELECT name, model, started, completed, failed, meta FROM queue WHERE id = ? LIMIT 1;`;
let jobStatus = 'Unknown';
return new Promise((resolve, reject) => {
return db.all(query, [id], (err, rows) => {
if (err)
return reject(err);
if (rows.length !== 1) {
return resolve(jobStatus);
}
const obj = rows[0];
if (rows[0].started === null) {
jobStatus = `Has not started`;
}
else if (rows[0].failed !== null) {
                jobStatus = `Failed
                ${rows[0].meta}`;
            }
            else if (rows[0].completed !== null) {
                jobStatus = `Completed ${rows[0].completed} Download`;
            }
            else if (rows[0].started !== null) {
                jobStatus = `Started ${rows[0].started}`;
            }
            log.info(`Got status for job ${id}: ${jobStatus}`);
            return resolve(jobStatus);
        });
    });
}
// Resolves with the display name of a job, or rejects if the id is unknown.
async function name(id) {
    const query = `SELECT name, meta FROM queue WHERE id = ? LIMIT 1;`;
    return new Promise((resolve, reject) => {
        return db.all(query, [id], (err, rows) => {
            if (err)
                return reject(err);
            if (rows.length < 1) {
                return reject(new Error(`Job ${id} does not exist`));
            }
            return resolve(rows[0].name);
        });
    });
}
// Resolves with the dataset hash stored for a job, or rejects if the id is unknown.
async function dataset(id) {
    const query = `SELECT dataset FROM queue WHERE id = ? LIMIT 1;`;
    return new Promise((resolve, reject) => {
        return db.all(query, [id], (err, rows) => {
            if (err)
                return reject(err);
            if (rows.length < 1) {
                return reject(new Error(`Dataset ${id} does not exist`));
            }
            return resolve(rows[0].dataset);
        });
    });
}
// Resolves with an array holding the id of the oldest unclaimed job, or an empty array.
async function job() {
    const query = `SELECT id FROM queue WHERE started IS NULL AND completed IS NULL AND failed IS NULL ORDER BY created ASC LIMIT 1;`;
    return new Promise((resolve, reject) => {
        return db.all(query, [], (err, rows) => {
            if (err)
                return reject(err);
            if (rows.length < 1) {
                return resolve([]);
            }
            return resolve([rows[0].id]);
        });
    });
}
// Resolves with the ids of all unclaimed jobs, oldest first.
async function jobs() {
    const query = `SELECT id FROM queue WHERE started IS NULL AND completed IS NULL AND failed IS NULL ORDER BY created ASC;`;
    return new Promise((resolve, reject) => {
        return db.all(query, [], (err, rows) => {
            if (err)
                return reject(err);
            return resolve(rows.map((el) => el.id));
        });
    });
}
// Marks a job as started and resolves with its row; rejects if the job is
// missing or already claimed.
async function claim(id) {
    const query = `SELECT * FROM queue WHERE id = ? LIMIT 1;`;
    return new Promise((resolve, reject) => {
        return db.all(query, [id], (err, rows) => {
            if (err)
                return reject(err);
            if (rows.length < 1) {
                return reject(new Error(`Dataset ${id} does not exist`));
            }
            if (rows[0].started !== null) {
                return reject(new Error(`Job ${id} is already claimed`));
            }
            const claimQuery = `UPDATE queue SET started = CURRENT_TIMESTAMP WHERE id = ?;`;
            return db.run(claimQuery, [id], (err, row) => {
                if (err)
                    return reject(err);
                return resolve(rows[0]);
            });
        });
    });
}
// Marks a job as failed, storing the reason in the meta column.
async function fail(id, meta) {
    const query = `UPDATE queue SET failed = CURRENT_TIMESTAMP, meta = ? WHERE id = ?;`;
    return new Promise((resolve, reject) => {
        return db.run(query, [meta, id], (err, row) => {
            if (err)
                return reject(err);
            return resolve(true);
        });
    });
}
// Marks a job as completed, storing any result details in the meta column.
async function complete(id, meta) {
    const query = `UPDATE queue SET completed = CURRENT_TIMESTAMP, meta = ? WHERE id = ?;`;
    return new Promise((resolve, reject) => {
        return db.run(query, [meta, id], (err, row) => {
            if (err)
                return reject(err);
            return resolve(true);
        });
    });
}
// Resolves with every job in the queue, newest first.
async function all() {
    const query = `SELECT * FROM queue ORDER BY created DESC;`;
    return new Promise((resolve, reject) => {
        return db.all(query, [], (err, rows) => {
            if (err)
                return reject(err);
            return resolve(rows);
        });
    });
}
// Adds display fields (status label, hasModel flag) to a queue row for templating.
function annotate(row) {
    row.hasModel = row.completed != null;
    if (row.completed != null) {
        row.status = 'Completed';
    }
    else if (row.failed != null) {
        row.status = 'Failed';
    }
    else if (row.started != null) {
        row.status = 'Training';
    }
    else {
        row.status = 'Waiting';
    }
    return row;
}
// GET /: render the queue overview page from the compiled index template.
app.get('/', async (req, res, next) => {
    let html;
    let rows;
    let input;
    try {
        rows = await all();
    }
    catch (err) {
        log.error(err);
        return next('ERROR: Could not retrieve jobs from queue');
    }
    rows = rows.map(annotate);
    input = { rows, hasTable: typeof rows == 'undefined' ? false : rows.length > 0 };
    html = index(input);
    res.send(html);
});
// POST /: accept a zipped dataset upload, store it under its SHA-256 hash,
// and enqueue a new training job.
app.post('/', uploadZip.single('dataset'), async (req, res, next) => {
    let fileHash;
    let filePath;
    let fileExists;
    let id;
    req.setTimeout(0);
    if (typeof req.file === 'undefined' || req.file === null) {
        log.error('No file in upload');
        return next('ERROR: Please upload dataset as zip file');
    }
    try {
        fileHash = await hash(req.file.path);
    }
    catch (err) {
        log.error(err);
        return next(`Error hashing file ${req.file.originalname}`);
    }
    filePath = (0, path_1.join)(data, `${fileHash}.zip`);
    try {
        fileExists = await exists(filePath);
    }
    catch (err) {
        log.error(err);
    }
    if (!fileExists) {
        try {
            await promises_1.default.copyFile(req.file.path, filePath);
            log.info(`Saved dataset with hash ${fileHash}`);
        }
        catch (err) {
            log.error(err);
            return next(err);
        }
    }
    else {
        log.warn(`Dataset with hash ${fileHash} already exists...`);
    }
    try {
        await promises_1.default.unlink(req.file.path);
    }
    catch (err) {
        log.error(err);
    }
    try {
        id = await add(req.body.email, req.body.name, fileHash, req.body.model);
    }
    catch (err) {
        log.info(err);
        return next(`Error adding training job ${req.body.name}`);
    }
    res.send(`Dataset for job ${req.body.name} has been uploaded successfully. You will be emailed when your job has started and when it has completed training.