// yolo_web/dist/index.js — compiled output (~470 lines, 15 KiB, JavaScript)

"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const express_1 = __importDefault(require("express"));
const promises_1 = __importDefault(require("fs/promises"));
const fs_1 = require("fs");
const os_1 = require("os");
const path_1 = require("path");
const crypto_1 = require("crypto");
const sqlite3_1 = require("sqlite3");
const body_parser_1 = __importDefault(require("body-parser"));
const multer_1 = __importDefault(require("multer"));
const uuid_1 = require("uuid");
const mime_1 = require("mime");
// Runtime configuration: port and data directory come from the environment,
// with defaults suitable for local development.
const portEnv = process.env['PORT'];
const port = typeof portEnv !== 'undefined' ? parseInt(portEnv, 10) : 3333;
const dataEnv = process.env['DATADIR'];
const data = typeof dataEnv !== 'undefined' ? dataEnv : './data';
// The job queue lives in a single sqlite database inside the data directory.
const dbPath = (0, path_1.join)(data, 'queue.sqlite');
const app = (0, express_1.default)();
const tmp = (0, os_1.tmpdir)();
const db = new sqlite3_1.Database(dbPath);
// Mimetypes accepted for dataset uploads (zip archives only).
const accepted = ['application/zip', 'application/x-zip-compressed'];
// Multer disk storage: stage uploads in the OS temp dir under a
// timestamp-prefixed name so simultaneous uploads of the same file
// cannot collide.
const storage = multer_1.default.diskStorage({
    destination: (req, file, cb) => cb(null, tmp),
    filename: (req, file, cb) => cb(null, `${+new Date()}_${file.originalname}`),
});
/**
 * Multer file filter: accept only zip uploads.
 * Anything else is rejected with an error so the route handler never sees it.
 */
function fileFilter(req, file, cb) {
    if (!accepted.includes(file.mimetype)) {
        console.warn(`Filetype ${file.mimetype} is not of type zip`);
        cb(new Error("Dataset is not of type zip"), false);
        return;
    }
    cb(null, true);
}
// Two upload pipelines: datasets must pass the zip fileFilter; model (ONNX)
// uploads are accepted unfiltered. Body parsers handle the non-multipart routes.
const uploadZip = (0, multer_1.default)({ storage, fileFilter });
const uploadOnnx = (0, multer_1.default)({ storage });
app.use(body_parser_1.default.json());
app.use(body_parser_1.default.urlencoded({ extended: true }));
/**
 * Stream the file at `path` through sha256 and resolve with the hex digest.
 * Streaming keeps memory usage flat regardless of file size.
 * Rejects on any read error.
 */
function hash(path) {
    return new Promise((resolve, reject) => {
        const digest = (0, crypto_1.createHash)('sha256');
        (0, fs_1.createReadStream)(path)
            .on('data', (chunk) => digest.update(chunk))
            .on('end', () => resolve(digest.digest('hex')))
            .on('error', (err) => reject(err));
    });
}
/**
 * Resolve true when `path` is accessible, false otherwise.
 * Never rejects — any access error is treated as "does not exist".
 */
async function exists(path) {
    return promises_1.default.access(path).then(() => true, () => false);
}
/**
 * Insert a new training job into the queue and resolve with its generated
 * v4-uuid id. All values are bound as SQL parameters, never interpolated.
 */
async function add(email, name, dataset, model) {
    const id = (0, uuid_1.v4)();
    const insert = `INSERT INTO queue
(id, email, name, dataset, model)
VALUES ( ?, ?, ?, ?, ?);`;
    return new Promise((resolve, reject) => {
        db.run(insert, [id, email, name, dataset, model], (err) => {
            if (err) {
                return reject(err);
            }
            console.log(`Added job ${id} to queue`);
            return resolve(id);
        });
    });
}
/**
 * Resolve a human-readable HTML-fragment status string for job `id`.
 * Resolves 'Unknown' when the job does not exist; rejects on DB errors.
 *
 * Fixes:
 *  - `meta` is request-supplied (POST /job/fail/:id) and was interpolated into
 *    HTML unescaped — stored XSS. It is now escaped.
 *  - A job that failed before being claimed has `started` NULL but `failed`
 *    set; the old branch order reported it as "Has not started" forever.
 *    Failure (then completion) is now checked before the started/not-started
 *    distinction.
 */
async function status(id) {
    const query = `SELECT name, model, started, completed, failed, meta FROM queue WHERE id = ? LIMIT 1;`;
    // Minimal HTML escaping for untrusted text rendered into the status page.
    const escapeHtml = (s) => String(s)
        .replace(/&/g, '&amp;')
        .replace(/</g, '&lt;')
        .replace(/>/g, '&gt;')
        .replace(/"/g, '&quot;');
    let jobStatus = 'Unknown';
    return new Promise((resolve, reject) => {
        return db.all(query, [id], (err, rows) => {
            if (err)
                return reject(err);
            if (rows.length !== 1) {
                return resolve(jobStatus);
            }
            const row = rows[0];
            if (row.failed !== null) {
                jobStatus = `Failed <br /> <pre>${escapeHtml(row.meta)}</pre>`;
            }
            else if (row.completed !== null) {
                jobStatus = `Completed ${row.completed} <a href="/model/${id}">Download</a>`;
            }
            else if (row.started !== null) {
                jobStatus = `Started ${row.started}`;
            }
            else {
                jobStatus = `Has not started`;
            }
            console.log(`Got status for job ${id}: ${jobStatus}`);
            return resolve(jobStatus);
        });
    });
}
/**
 * Resolve the user-supplied job name for `id`.
 * Rejects when no such job exists or on DB errors.
 */
async function name(id) {
    const query = `SELECT name, meta FROM queue WHERE id = ? LIMIT 1;`;
    return new Promise((resolve, reject) => {
        db.all(query, [id], (err, rows) => {
            if (err) {
                return reject(err);
            }
            if (rows.length < 1) {
                return reject(new Error(`Job ${id} does not exist`));
            }
            const [first] = rows;
            return resolve(first.name);
        });
    });
}
/**
 * Resolve the dataset hash (sha256, used as the on-disk filename) for job `id`.
 * Rejects when no such job exists or on DB errors.
 */
async function dataset(id) {
    const query = `SELECT dataset FROM queue WHERE id = ? LIMIT 1;`;
    return new Promise((resolve, reject) => {
        db.all(query, [id], (err, rows) => {
            if (err) {
                return reject(err);
            }
            if (rows.length < 1) {
                return reject(new Error(`Dataset ${id} does not exist`));
            }
            const [first] = rows;
            return resolve(first.dataset);
        });
    });
}
/**
 * Resolve with an array holding the id of the oldest unclaimed, unfinished
 * job, or an empty array when the queue is drained.
 */
async function job() {
    const query = `SELECT id FROM queue WHERE
started IS NULL
AND completed IS NULL
AND failed IS NULL
ORDER BY created ASC
LIMIT 1;`;
    return new Promise((resolve, reject) => {
        db.all(query, [], (err, rows) => {
            if (err) {
                return reject(err);
            }
            return resolve(rows.length < 1 ? [] : [rows[0].id]);
        });
    });
}
/**
 * Resolve with the ids of every pending (unclaimed, unfinished) job,
 * oldest first.
 */
async function jobs() {
    const query = `SELECT id FROM queue WHERE
started IS NULL
AND completed IS NULL
AND failed IS NULL
ORDER BY created ASC;`;
    return new Promise((resolve, reject) => {
        db.all(query, [], (err, rows) => {
            if (err) {
                return reject(err);
            }
            const ids = rows.map((row) => row.id);
            return resolve(ids);
        });
    });
}
/**
 * Atomically claim job `id` for a worker and resolve with its full queue row
 * (post-claim, so `started` is set).
 *
 * Fix: the original SELECT-then-UPDATE was not atomic — two workers polling
 * concurrently could both observe `started IS NULL` and claim the same job.
 * The claim is now a single guarded UPDATE; `this.changes === 0` means the
 * race was lost (already claimed) or the job never existed, and a follow-up
 * SELECT distinguishes the two so callers keep the original error messages.
 *
 * Rejects when the job does not exist or is already claimed.
 */
async function claim(id) {
    const claimQuery = `UPDATE queue SET started = CURRENT_TIMESTAMP WHERE id = ? AND started IS NULL;`;
    const selectQuery = `SELECT * FROM queue WHERE id = ? LIMIT 1;`;
    return new Promise((resolve, reject) => {
        // Must be a `function` (not an arrow) so sqlite3 can expose this.changes.
        db.run(claimQuery, [id], function (err) {
            if (err)
                return reject(err);
            const claimed = this.changes > 0;
            return db.all(selectQuery, [id], (err2, rows) => {
                if (err2)
                    return reject(err2);
                if (rows.length < 1) {
                    return reject(new Error(`Dataset ${id} does not exist`));
                }
                if (!claimed) {
                    return reject(new Error(`Job ${id} is already claimed`));
                }
                return resolve(rows[0]);
            });
        });
    });
}
/**
 * Mark job `id` as failed now, storing `meta` (e.g. an error log) alongside.
 * Resolves true on success; rejects on DB errors.
 */
async function fail(id, meta) {
    const query = `UPDATE queue SET failed = CURRENT_TIMESTAMP, meta = ? WHERE id = ?;`;
    return new Promise((resolve, reject) => {
        db.run(query, [meta, id], (err) => {
            if (err) {
                return reject(err);
            }
            return resolve(true);
        });
    });
}
/**
 * Mark job `id` as completed now, storing optional `meta` alongside.
 * Resolves true on success; rejects on DB errors.
 */
async function complete(id, meta) {
    const query = `UPDATE queue SET completed = CURRENT_TIMESTAMP, meta = ? WHERE id = ?;`;
    return new Promise((resolve, reject) => {
        db.run(query, [meta, id], (err) => {
            if (err) {
                return reject(err);
            }
            return resolve(true);
        });
    });
}
// Landing page: serve the static upload form from disk on every request.
app.get('/', async (req, res, next) => {
    try {
        const html = await promises_1.default.readFile('./views/index.html', 'utf8');
        res.send(html);
    }
    catch (err) {
        console.error(err);
        return next(err);
    }
});
// Dataset upload: the zip is stored content-addressed by sha256 (identical
// uploads share one file), then a training job is queued.
// Fix: req.body.name is user input and was interpolated into the HTML
// response unescaped (reflected XSS) — it is now escaped. Also uses
// console.error consistently for error logging.
app.post('/', uploadZip.single('dataset'), async (req, res, next) => {
    // Minimal HTML escaping for user-supplied text echoed back in the response.
    const escapeHtml = (s) => String(s)
        .replace(/&/g, '&amp;')
        .replace(/</g, '&lt;')
        .replace(/>/g, '&gt;')
        .replace(/"/g, '&quot;');
    let fileHash;
    let fileExists = false;
    let id;
    // Uploads can be large; disable the request timeout.
    req.setTimeout(0);
    if (typeof req.file === 'undefined' || req.file === null) {
        console.error('No file in upload');
        return next('ERROR: Please upload dataset as zip file');
    }
    try {
        fileHash = await hash(req.file.path);
    }
    catch (err) {
        console.error(err);
        return next(`Error hashing file ${req.file.originalname}`);
    }
    const filePath = (0, path_1.join)(data, `${fileHash}.zip`);
    try {
        fileExists = await exists(filePath);
    }
    catch (err) {
        // Best effort: if the check fails we fall through and (re)copy the file.
        console.error(err);
    }
    if (!fileExists) {
        try {
            await promises_1.default.copyFile(req.file.path, filePath);
            console.log(`Saved dataset with hash ${fileHash}`);
        }
        catch (err) {
            console.error(err);
            return next(err);
        }
    }
    else {
        console.warn(`Dataset with hash ${fileHash} already exists...`);
    }
    // Clean up multer's temp file; failure here is non-fatal.
    try {
        await promises_1.default.unlink(req.file.path);
    }
    catch (err) {
        console.error(err);
    }
    try {
        id = await add(req.body.email, req.body.name, fileHash, req.body.model);
    }
    catch (err) {
        console.error(err);
        return next(`Error adding training job ${req.body.name}`);
    }
    const safeName = escapeHtml(req.body.name);
    res.send(`<html><body>Dataset for job ${safeName} has been uploaded successfully. You will be emailed when your job has started and when it has completed training. <br /> Monitor job status here: <a href="/job/${id}">${id}</a></body></html>`);
});
// Worker callback: upload the trained ONNX model for a job and mark it complete.
// Fixes: the job id was interpolated into a filesystem path with no validation
// (path traversal risk — Express decodes %2F in params) — ids must now match
// the v4-uuid shape; the "no file" error message was also garbled
// ("Please model as ONNX file").
app.post('/job/:id', uploadOnnx.single('model'), async (req, res, next) => {
    let meta = null;
    // Model uploads can be large; disable the request timeout.
    req.setTimeout(0);
    if (typeof req.file === 'undefined' || req.file === null) {
        console.error('No file in upload');
        return next('ERROR: Please upload model as ONNX file');
    }
    const id = req.params.id;
    // Job ids are server-generated v4 uuids; reject anything else before it
    // reaches the filesystem.
    if (!/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i.test(id)) {
        console.error(`Job id ${id} is invalid`);
        return next('Invalid job id');
    }
    const filePath = (0, path_1.join)(data, `${id}.onnx`);
    if (typeof req.body.meta !== 'undefined') {
        meta = req.body.meta;
    }
    try {
        await promises_1.default.copyFile(req.file.path, filePath);
        console.log(`Saved model for job ${id}`);
    }
    catch (err) {
        console.error(err);
        return next(err);
    }
    // Remove multer's temp file; non-fatal if it fails.
    try {
        await promises_1.default.unlink(req.file.path);
    }
    catch (err) {
        console.error(err);
    }
    try {
        await complete(id, meta);
    }
    catch (err) {
        console.error(err);
        return next(`Error completing training job ${id}`);
    }
    res.json({ id });
});
// Human-facing job status page.
// Fix: the old check only required a 36-character id, so a crafted 36-char
// payload was reflected into the HTML response unescaped (reflected XSS).
// Ids are now validated against the v4-uuid shape before any use.
app.get('/job/:id', async (req, res, next) => {
    let jobStatus;
    if (typeof req.params.id === 'undefined' || req.params.id === null) {
        console.error(`No job id provided`);
        return next('Invalid request');
    }
    if (!/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i.test(req.params.id)) {
        console.error(`Job id ${req.params.id} is invalid`);
        return next('Invalid job id');
    }
    try {
        jobStatus = await status(req.params.id);
    }
    catch (err) {
        console.error(err);
        return next('Error getting job status');
    }
    return res.send(`<html><body>Job: ${req.params.id}<br /> Status: ${jobStatus}</body></html>`);
});
// Download the trained model for a job, named after the job's user-supplied name.
// Fixes: the id was joined into a filesystem path with no validation (path
// traversal); the user-supplied job name went into the Content-Disposition
// header unsanitized (header injection / invalid header characters);
// mime.getType() can return null, which res.setHeader rejects. Unused locals
// (fileStream, stream duplication) were dropped.
app.get('/model/:id', async (req, res, next) => {
    let fileExists = false;
    let fileName;
    if (typeof req.params.id === 'undefined' || req.params.id === null) {
        console.error(`No job id provided`);
        return next('Invalid request');
    }
    const id = req.params.id;
    // Job ids are server-generated v4 uuids; reject anything else before the path join.
    if (!/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i.test(id)) {
        console.error(`Job id ${id} is invalid`);
        return next('Invalid job id');
    }
    const filePath = (0, path_1.join)(data, `${id}.onnx`);
    try {
        fileExists = await exists(filePath);
    }
    catch (err) {
        console.error(err);
        return next(`Error checking whether model for job ${id} exists`);
    }
    if (!fileExists) {
        console.warn(`Model for job ${id} does not exist`);
        return next(`Model for job ${id} does not exist`);
    }
    try {
        fileName = await name(id);
    }
    catch (err) {
        console.error(err);
        return next(`Error getting job ${id}`);
    }
    // Sanitize the user-supplied name so it cannot inject header characters.
    const safeName = String(fileName).replace(/[^\w.-]+/g, '_');
    const mimeType = (0, mime_1.getType)(filePath) || 'application/octet-stream';
    res.setHeader('Content-disposition', `attachment; filename=${safeName}.onnx`);
    res.setHeader('Content-type', mimeType);
    (0, fs_1.createReadStream)(filePath).pipe(res);
});
// Download the dataset zip for a job (workers fetch this after claiming).
// The id itself is only used in a parameterized DB lookup; the path is built
// from the server-computed sha256 hash, so it is safe to join.
// Fixes: mime.getType() can return null, which res.setHeader rejects — a zip
// default is used; unused locals (fileStream, duplicate stream var) dropped.
app.get('/dataset/:id', async (req, res, next) => {
    let fileExists = false;
    let datasetHash;
    if (typeof req.params.id === 'undefined' || req.params.id === null) {
        console.error(`No dataset id provided`);
        return next('Invalid request');
    }
    const id = req.params.id;
    try {
        datasetHash = await dataset(id);
    }
    catch (err) {
        console.error(err);
        return next(`Error getting dataset for job ${id}`);
    }
    const filePath = (0, path_1.join)(data, `${datasetHash}.zip`);
    try {
        fileExists = await exists(filePath);
    }
    catch (err) {
        console.error(err);
        return next(`Error checking whether dataset for job ${id} exists`);
    }
    if (!fileExists) {
        console.warn(`Dataset for job ${id} does not exist`);
        return next(`Dataset for job ${id} does not exist`);
    }
    const mimeType = (0, mime_1.getType)(filePath) || 'application/zip';
    res.setHeader('Content-disposition', `attachment; filename=${datasetHash}.zip`);
    res.setHeader('Content-type', mimeType);
    (0, fs_1.createReadStream)(filePath).pipe(res);
});
// Worker polling endpoint: responds with [] or [oldestPendingJobId].
app.get('/job', async (req, res, next) => {
    try {
        const jobArr = await job();
        res.json(jobArr);
    }
    catch (err) {
        console.error(err);
        return next('Error getting job');
    }
});
// List all pending job ids, oldest first.
// Fix: the error message said 'Error getting job' (copy/paste from GET /job).
app.get('/jobs', async (req, res, next) => {
    let jobArr;
    try {
        jobArr = await jobs();
    }
    catch (err) {
        console.error(err);
        return next('Error getting jobs');
    }
    res.json(jobArr);
});
// Worker claims a job: marks it started and returns everything the worker
// needs to train (dataset path/hash, model type, job name, owner email).
// Fix: the missing-id log said 'No dataset id provided' (copy/paste from the
// dataset route) — it is a job id here.
app.post('/job/claim/:id', async (req, res, next) => {
    let jobObj;
    if (typeof req.params.id === 'undefined' || req.params.id === null) {
        console.error(`No job id provided`);
        return next('Invalid request');
    }
    const id = req.params.id;
    try {
        jobObj = await claim(id);
        console.log(`Job ${id} was claimed`);
    }
    catch (err) {
        console.error(err);
        return next('Error claiming job');
    }
    res.json({
        id,
        path: `/dataset/${id}`,
        dataset: jobObj.dataset,
        model: jobObj.model,
        name: jobObj.name,
        email: jobObj.email,
    });
});
// Worker reports a job failure; optional meta (e.g. a traceback) is stored
// and later shown on the status page.
// Fix: the missing-id log said 'No dataset id provided' (copy/paste) — it is
// a job id here.
app.post('/job/fail/:id', async (req, res, next) => {
    let meta = null;
    if (typeof req.params.id === 'undefined' || req.params.id === null) {
        console.error(`No job id provided`);
        return next('Invalid request');
    }
    const id = req.params.id;
    if (typeof req.body.meta !== 'undefined') {
        meta = req.body.meta;
    }
    try {
        await fail(id, meta);
        console.log(`Job ${id} failed`);
    }
    catch (err) {
        console.error(err);
        return next('Error failing job');
    }
    res.json(true);
});
// Start the HTTP server on the configured port (PORT env var, default 3333).
app.listen(port, () => {
    console.log(`yolo_web running on port ${port}`);
});
//# sourceMappingURL=index.js.map