All work from tonight. Almost there
This commit is contained in:
parent 1e789a16cc
commit d5b21a137e
@ -2,6 +2,9 @@ INBOX="~/Photos/toprocess"
|
|||
PHOTOS="~/Photos/photosite"
|
||||
WWW="./www"
|
||||
TEMPLATES="./views"
|
||||
S3_KEY=""
|
||||
S3_ACCESS_KEY=""
|
||||
S3_ACCESS_SECRET=""
|
||||
S3_BUCKET=""
|
||||
S3_ENDPOINT=""
|
||||
UMAMI=""
|
||||
DB="data/site.db"
|
|
@ -0,0 +1,25 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.DB = void 0;
|
||||
require("dotenv/config");
|
||||
const log_1 = require("../log");
|
||||
const sqlite3_1 = require("sqlite3");
|
||||
const env_1 = require("../env");
|
||||
class DB {
|
||||
constructor() {
|
||||
this.log = (0, log_1.createLog)('db');
|
||||
this.db = new sqlite3_1.Database((0, env_1.envString)('DB', 'data/site.db'));
|
||||
}
|
||||
async run(query, args = null) {
|
||||
return new Promise((resolve, reject) => {
|
||||
return this.db.run(query, args, (err, rows) => {
|
||||
if (err)
|
||||
return reject(err);
|
||||
return resolve(true);
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
exports.DB = DB;
|
||||
module.exports = { DB };
|
||||
//# sourceMappingURL=index.js.map
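A minimal usage sketch of the compiled wrapper above, assuming the version table defined in sql/setup.sql later in this commit; run() is private in the TypeScript source, so this leans on the emitted JavaScript and the query is illustrative.

import { DB } from './db';

// Resolves true on success, rejects with the sqlite3 error otherwise.
async function touchVersion () {
    const db : any = new DB();
    await db.run('INSERT INTO version (id, updated) VALUES (?, ?)', ['v1', +new Date()]);
}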
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/db/index.ts"],"names":[],"mappings":";;;AAAA,yBAAuB;AAEvB,gCAAmC;AAEnC,qCAAmC;AACnC,gCAAmC;AAYnC,MAAa,EAAE;IAId;QACC,IAAI,CAAC,GAAG,GAAG,IAAA,eAAS,EAAC,IAAI,CAAC,CAAC;QAC3B,IAAI,CAAC,EAAE,GAAG,IAAI,kBAAQ,CAAC,IAAA,eAAS,EAAC,IAAI,EAAE,cAAc,CAAC,CAAC,CAAC;IACzD,CAAC;IAEO,KAAK,CAAC,GAAG,CAAE,KAAc,EAAE,OAAe,IAAI;QACrD,OAAO,IAAI,OAAO,CAAC,CAAC,OAAkB,EAAE,MAAiB,EAAE,EAAE;YAC5D,OAAO,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,KAAK,EAAE,IAAI,EAAE,CAAC,GAAW,EAAE,IAAY,EAAE,EAAE;gBAC7D,IAAI,GAAG;oBAAE,OAAO,MAAM,CAAC,GAAG,CAAC,CAAC;gBAC5B,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC;YACtB,CAAC,CAAC,CAAC;QACJ,CAAC,CAAC,CAAC;IACJ,CAAC;CAED;AAlBD,gBAkBC;AAED,MAAM,CAAC,OAAO,GAAG,EAAE,EAAE,EAAE,CAAC"}
|
|
@ -0,0 +1,16 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.envString = envString;
|
||||
exports.envFloat = envFloat;
|
||||
exports.envInt = envInt;
|
||||
function envString(variable, defaultString) {
|
||||
return typeof process.env[variable] !== 'undefined' ? process.env[variable] : defaultString;
|
||||
}
|
||||
function envFloat(variable, defaultFloat) {
|
||||
return typeof process.env[variable] !== 'undefined' ? parseFloat(process.env[variable]) : defaultFloat;
|
||||
}
|
||||
function envInt(variable, defaultInt) {
|
||||
return typeof process.env[variable] !== 'undefined' ? parseInt(process.env[variable]) : defaultInt;
|
||||
}
|
||||
module.exports = { envString, envFloat, envInt };
|
||||
//# sourceMappingURL=index.js.map
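For reference, a short sketch of how these helpers resolve defaults; DB and S3_BUCKET mirror the .env keys added at the top of this commit, while MAX_WIDTH is a made-up key used only to show envInt.

import { envString, envInt } from './env';

// Each helper returns the environment value when the variable is set,
// otherwise the supplied default.
const dbPath : string = envString('DB', 'data/site.db');
const bucket : string = envString('S3_BUCKET', 'my-photo-bucket'); // placeholder bucket name
const maxWidth : number = envInt('MAX_WIDTH', 1920);               // hypothetical key
console.log(dbPath, bucket, maxWidth);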
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/env/index.ts"],"names":[],"mappings":";;AAAA,8BAEC;AAED,4BAEC;AAED,wBAEC;AAVD,SAAgB,SAAS,CAAE,QAAiB,EAAE,aAAsB;IACnE,OAAO,OAAO,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,KAAK,WAAW,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC;AAC7F,CAAC;AAED,SAAgB,QAAQ,CAAE,QAAiB,EAAE,YAAqB;IACjE,OAAO,OAAO,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,KAAK,WAAW,CAAC,CAAC,CAAC,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC;AACxG,CAAC;AAED,SAAgB,MAAM,CAAE,QAAiB,EAAE,UAAmB;IAC7D,OAAO,OAAO,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,KAAK,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC;AACpG,CAAC;AAED,MAAM,CAAC,OAAO,GAAG,EAAE,SAAS,EAAE,QAAQ,EAAE,MAAM,EAAE,CAAC"}
|
|
@ -0,0 +1,477 @@
|
|||
'use strict';
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const uuid_1 = require("uuid");
|
||||
const path_1 = require("path");
|
||||
const promises_1 = require("fs/promises");
|
||||
const fs_1 = require("fs");
|
||||
const crypto_1 = require("crypto");
|
||||
const mime = __importStar(require("mime"));
|
||||
const aws_sdk_1 = require("aws-sdk");
|
||||
const os_1 = require("os");
|
||||
const s3Stream = require("s3-upload-stream");
|
||||
//import * as sharp from 'sharp';
|
||||
//const log = require('log')('file');
|
||||
const TMP_DIR = (typeof process.env.FILE_DIR !== 'undefined') ? process.env.FILE_DIR : ((0, os_1.tmpdir)() || '/tmp');
|
||||
class Files {
|
||||
/**
|
||||
* @constructor
|
||||
*
|
||||
*/
|
||||
constructor(bucket, writeable = true) {
|
||||
this.writeable = false;
|
||||
const S3_ACCESS_KEY = process.env.S3_ACCESS_KEY || 'YOUR-ACCESSKEYID';
|
||||
const S3_ACCESS_SECRET = process.env.S3_ACCESS_SECRET || 'YOUR-SECRETACCESSKEY';
|
||||
const S3_ENDPOINT = process.env.S3_ENDPOINT || 'http://127.0.0.1:9000';
|
||||
const spacesEndpoint = new aws_sdk_1.Endpoint(S3_ENDPOINT);
|
||||
const s3Config = {
|
||||
accessKeyId: S3_ACCESS_KEY,
|
||||
secretAccessKey: S3_ACCESS_SECRET,
|
||||
endpoint: spacesEndpoint,
|
||||
signatureVersion: 'v4'
|
||||
};
|
||||
this.endpoint = S3_ENDPOINT;
|
||||
this.s3 = new aws_sdk_1.S3(s3Config);
|
||||
this.s3Stream = s3Stream(this.s3);
|
||||
this.bucket = bucket;
|
||||
this.writeable = writeable;
|
||||
}
|
||||
/**
|
||||
* Create a SHA256 hash of any data provided.
|
||||
**/
|
||||
hash(data) {
|
||||
return (0, crypto_1.createHash)('sha256').update(data).digest('base64');
|
||||
}
|
||||
async exists(path) {
|
||||
try {
|
||||
await (0, promises_1.access)(path);
|
||||
return true;
|
||||
}
|
||||
catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Read file from disk as buffer and create hash of the data.
|
||||
**/
|
||||
async hashFile(filePath) {
|
||||
let data;
|
||||
try {
|
||||
data = await (0, promises_1.readFile)(filePath);
|
||||
}
|
||||
catch (err) {
|
||||
console.error(err);
|
||||
}
|
||||
return this.hash(data);
|
||||
}
|
||||
/**
|
||||
* create a file object on an S3 bucket and upload data.
|
||||
* Reads into memory
|
||||
**/
|
||||
async create(file, keyName) {
|
||||
if (!this.writeable)
|
||||
return false;
|
||||
const id = await this.hashFile(file);
|
||||
const ext = mime.getExtension(file.mimetype);
|
||||
const key = typeof keyName !== 'undefined' ? keyName : `${id}.${ext}`;
|
||||
const webPath = (0, path_1.join)('/files/', this.bucket, key);
|
||||
const publicPath = (0, path_1.join)(`${this.bucket}.${this.endpoint}`, key);
|
||||
const record = {
|
||||
id,
|
||||
created: +new Date(),
|
||||
name: file.originalname,
|
||||
public: publicPath,
|
||||
path: webPath,
|
||||
path_hash: this.hash(webPath),
|
||||
hash: null,
|
||||
type: file.mimetype,
|
||||
size: null
|
||||
};
|
||||
const params = {
|
||||
Bucket: this.bucket,
|
||||
Key: key,
|
||||
Body: null
|
||||
};
|
||||
params.Body = file.buffer;
|
||||
record.hash = this.hash(file.buffer);
|
||||
record.size = file.buffer.byteLength;
|
||||
return new Promise((resolve, reject) => {
|
||||
return this.s3.putObject(params, function (err, data) {
|
||||
if (err) {
|
||||
console.error(err);
|
||||
return reject(err);
|
||||
}
|
||||
else {
|
||||
console.log(`Saved file ${record.path}`);
|
||||
return resolve(record);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
/*
|
||||
* Create an s3 record using only a path reference of a local file
|
||||
* Reads into memory
|
||||
*/
|
||||
async createFromPath(filePath, keyName) {
|
||||
const filename = (0, path_1.basename)(filePath);
|
||||
const mimetype = mime.getType(filePath);
|
||||
const key = typeof keyName !== 'undefined' ? keyName : null;
|
||||
let file;
|
||||
let buffer;
|
||||
try {
|
||||
buffer = await (0, promises_1.readFile)(filePath);
|
||||
}
|
||||
catch (err) {
|
||||
console.error('createFromPath', err);
|
||||
}
|
||||
file = {
|
||||
buffer,
|
||||
mimetype,
|
||||
originalname: filename
|
||||
};
|
||||
return this.create(file, key);
|
||||
}
|
||||
/**
|
||||
* Create an s3 record from a stream
|
||||
* Pass "createReadStream('')" object
|
||||
**/
|
||||
/*public async createStream (file : any) {
|
||||
if (!this.writeable) return false;
|
||||
const id : string = uuid();
|
||||
const ext : string = getExtension(file.mimetype);
|
||||
const key : string = `${id}.${ext}`;
|
||||
const webPath : string = pathJoin('/files/', this.bucket, key);
|
||||
const publicPath : string = pathJoin(`${this.bucket}.${this.endpoint}`, key);
|
||||
const record : FileRecord = {
|
||||
id,
|
||||
created : +new Date(),
|
||||
name : file.originalname,
|
||||
public : publicPath,
|
||||
path : webPath,
|
||||
path_hash : this.hash(webPath),
|
||||
hash : null,
|
||||
type : file.mimetype,
|
||||
size : null
|
||||
};
|
||||
const params : S3Params = {
|
||||
Bucket: this.bucket,
|
||||
Key: key
|
||||
};
|
||||
const upload = this.s3Stream.upload(params);
|
||||
|
||||
upload.maxPartSize(20971520); // 20 MB
|
||||
upload.concurrentParts(5);
|
||||
|
||||
return new Promise((resolve : Function, reject : Function) => {
|
||||
upload.on('error', (err : Error) => {
|
||||
return reject(err);
|
||||
});
|
||||
upload.on('part', (details : any) => {
|
||||
console.log(`${details.ETag} - part: ${details.PartNumber} received: ${details.receivedSize} uploaded: ${details.uploadedSize}`)
|
||||
});
|
||||
upload.on('uploaded', (details : any) => {
|
||||
record.hash = details.ETag;
|
||||
record.size = details.uploadedSize;
|
||||
console.log(`Saved file ${record.path}`);
|
||||
return resolve(record);
|
||||
});
|
||||
console.log(`Streaming ${record.path} to S3`);
|
||||
stream.pipe(upload);
|
||||
});
|
||||
}*/
|
||||
/**
|
||||
* Create a stream . Bind to busboy.on('file', files3.createStream)
|
||||
* ex. (express POST route callback)
|
||||
* var busboy = new Busboy({ headers: req.headers });
|
||||
* busboy.on('file', files3.createStream)
|
||||
* req.pipe(busboy);
|
||||
**/
|
||||
/* public async createStreamExpress (fieldname : string, file : any, filename : string, encoding : any, mimetype : string) {
|
||||
if (!this.writeable) return false;
|
||||
const id : string = uuid();
|
||||
const ext : string = getExtension(mimetype);
|
||||
const key : string = `${id}.${ext}`;
|
||||
const webPath : string = pathJoin('/files/', this.bucket, key);
|
||||
const publicPath : string = pathJoin(`${this.bucket}.${this.endpoint}`, key);
|
||||
const record : FileRecord = {
|
||||
id,
|
||||
created : +new Date(),
|
||||
name : filename,
|
||||
public : publicPath,
|
||||
path : webPath,
|
||||
path_hash : this.hash(webPath),
|
||||
hash : null,
|
||||
type : file.mimetype,
|
||||
size : null
|
||||
};
|
||||
const params : S3Params = {
|
||||
Bucket: this.bucket,
|
||||
Key: key
|
||||
};
|
||||
const upload = this.s3Stream.upload(params);
|
||||
|
||||
upload.maxPartSize(20971520); // 20 MB
|
||||
upload.concurrentParts(5);
|
||||
|
||||
return new Promise((resolve : Function, reject : Function) => {
|
||||
var s3 = new AWS.S3({
|
||||
params: {Bucket: 'sswa', Key: filename, Body: file},
|
||||
options: {partSize: 5 * 1024 * 1024, queueSize: 10} // 5 MB
|
||||
});
|
||||
s3.upload().on('httpUploadProgress', function (evt) {
|
||||
console.log(evt);
|
||||
}).send(function (err, data) {
|
||||
s3UploadFinishTime = new Date();
|
||||
if(busboyFinishTime && s3UploadFinishTime) {
|
||||
res.json({
|
||||
uploadStartTime: uploadStartTime,
|
||||
busboyFinishTime: busboyFinishTime,
|
||||
s3UploadFinishTime: s3UploadFinishTime
|
||||
});
|
||||
}
|
||||
console.log(err, data);
|
||||
});
|
||||
file.on('data', ( data : any ) => {
|
||||
upload.on('error', (err : Error) => {
|
||||
return reject(err);
|
||||
});
|
||||
upload.on('part', (details : any) => {
|
||||
console.log(`${details.ETag} - part: ${details.PartNumber} received: ${details.receivedSize} uploaded: ${details.uploadedSize}`)
|
||||
});
|
||||
upload.on('uploaded', (details : any) => {
|
||||
record.hash = details.ETag;
|
||||
record.size = details.uploadedSize;
|
||||
console.log(`Saved file ${record.path}`)
|
||||
return resolve(record)
|
||||
});
|
||||
data.pipe(upload);
|
||||
})
|
||||
});
|
||||
}*/
|
||||
/**
|
||||
* Create a stream from a path on the local device
|
||||
*
|
||||
* @param {string} filePath Path to file
|
||||
* @param {string} keyName (optional) Predefined key
|
||||
**/
|
||||
async createStreamFromPath(filePath, keyName) {
|
||||
if (!this.writeable)
|
||||
return false;
|
||||
const id = (0, uuid_1.v4)();
|
||||
const fileName = (0, path_1.basename)(filePath);
|
||||
const mimetype = mime.getType(filePath);
|
||||
const ext = mime.getExtension(mimetype);
|
||||
const key = typeof keyName !== 'undefined' ? keyName : `${id}.${ext}`;
|
||||
const webPath = (0, path_1.join)('/files/', this.bucket, key);
|
||||
const publicPath = (0, path_1.join)(`${this.bucket}.${this.endpoint}`, key);
|
||||
const record = {
|
||||
id,
|
||||
created: +new Date(),
|
||||
name: fileName,
|
||||
public: publicPath,
|
||||
path: webPath,
|
||||
path_hash: this.hash(webPath),
|
||||
hash: null,
|
||||
type: mimetype,
|
||||
size: null
|
||||
};
|
||||
const params = {
|
||||
Bucket: this.bucket,
|
||||
Key: key
|
||||
};
|
||||
const upload = this.s3Stream.upload(params);
|
||||
const stream = (0, fs_1.createReadStream)(filePath);
|
||||
upload.maxPartSize(20971520); // 20 MB
|
||||
upload.concurrentParts(5);
|
||||
return new Promise((resolve, reject) => {
|
||||
upload.on('error', (err) => {
|
||||
return reject(err);
|
||||
});
|
||||
upload.on('part', (details) => {
|
||||
console.log(`${details.ETag} - part: ${details.PartNumber} received: ${details.receivedSize} uploaded: ${details.uploadedSize}`);
|
||||
});
|
||||
upload.on('uploaded', (details) => {
|
||||
record.hash = details.ETag;
|
||||
record.size = details.uploadedSize;
|
||||
console.log(`Saved file ${record.path}`);
|
||||
return resolve(record);
|
||||
});
|
||||
console.log(`Streaming ${record.path} to S3`);
|
||||
stream.pipe(upload);
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Read a file from S3 using a key
|
||||
*
|
||||
* @param {string} key File key
|
||||
*
|
||||
* @returns {string} File data
|
||||
**/
|
||||
async read(key) {
|
||||
const params = {
|
||||
Bucket: this.bucket,
|
||||
Key: key
|
||||
};
|
||||
return new Promise((resolve, reject) => {
|
||||
return this.s3.getObject(params, (err, data) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
return resolve(data.Body); //buffer
|
||||
});
|
||||
});
|
||||
}
|
||||
/*
|
||||
const xhr = new XMLHttpRequest();
|
||||
xhr.open('PUT', signedUrl);
|
||||
xhr.setRequestHeader('Content-Type', file.type);
|
||||
xhr.setRequestHeader('x-amz-acl', 'public-read');
|
||||
xhr.send(file);
|
||||
*/
|
||||
/**
|
||||
* Get a signed put key for writing
|
||||
*
|
||||
* @param {string} key Key that file will be located at
|
||||
* @param {string} fileType Mimetype of file
|
||||
*
|
||||
* @returns {string} Url of signed key
|
||||
**/
|
||||
async signedPutKey(key, fileType) {
|
||||
const s3Params = {
|
||||
Bucket: this.bucket,
|
||||
Key: key,
|
||||
ContentType: fileType,
|
||||
Expires: 86400 //1 day
|
||||
//ACL: 'public-read',
|
||||
};
|
||||
return new Promise((resolve, reject) => {
|
||||
return this.s3.getSignedUrl('putObject', s3Params, (err, url) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
return resolve(url);
|
||||
});
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Get a signed read key for reading
|
||||
*
|
||||
* @param {string} key Key that file will be located at
|
||||
*
|
||||
* @returns {string} Url of signed key
|
||||
**/
|
||||
async signedGetKey(key) {
|
||||
const s3Params = {
|
||||
Bucket: this.bucket,
|
||||
Key: key,
|
||||
Expires: 86400 //1 day
|
||||
//ACL: 'public-read',
|
||||
//Expires: 60 ?
|
||||
};
|
||||
return new Promise((resolve, reject) => {
|
||||
return this.s3.getSignedUrl('getObject', s3Params, (err, url) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
return resolve(url);
|
||||
});
|
||||
});
|
||||
}
|
||||
/*
|
||||
readStream (to express or server)
|
||||
s3.getObject(params)
|
||||
.on('httpHeaders', function (statusCode, headers) {
|
||||
res.set('Content-Length', headers['content-length']);
|
||||
res.set('Content-Type', headers['content-type']);
|
||||
this.response.httpResponse.createUnbufferedStream()
|
||||
.pipe(res);
|
||||
})
|
||||
.send();
|
||||
--------
|
||||
var fileStream = fs.createWriteStream('/path/to/file.jpg');
|
||||
var s3Stream = s3.getObject({Bucket: 'myBucket', Key: 'myImageFile.jpg'}).createReadStream();
|
||||
|
||||
// Listen for errors returned by the service
|
||||
s3Stream.on('error', function(err) {
|
||||
// NoSuchKey: The specified key does not exist
|
||||
console.error(err);
|
||||
});
|
||||
|
||||
s3Stream.pipe(fileStream).on('error', function(err) {
|
||||
// capture any errors that occur when writing data to the file
|
||||
console.error('File Stream:', err);
|
||||
}).on('close', function() {
|
||||
console.log('Done.');
|
||||
});
|
||||
*/
|
||||
/**
|
||||
* Delete an object at a specific key
|
||||
*
|
||||
* @param {string} key Key for object
|
||||
*
|
||||
* @returns {boolean} True if successful
|
||||
**/
|
||||
async delete(key) {
|
||||
if (!this.writeable)
|
||||
return false;
|
||||
const params = {
|
||||
Bucket: this.bucket,
|
||||
Key: key
|
||||
};
|
||||
return new Promise((resolve, reject) => {
|
||||
return this.s3.deleteObject(params, (err, data) => {
|
||||
if (err) {
|
||||
console.error(err);
|
||||
return reject(err);
|
||||
}
|
||||
return resolve(true); //buffer
|
||||
});
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Lists all objects with a specific prefix
|
||||
**/
|
||||
async list(prefix) {
|
||||
const params = {
|
||||
Bucket: this.bucket,
|
||||
Prefix: prefix
|
||||
};
|
||||
return new Promise((resolve, reject) => {
|
||||
return this.s3.listObjectsV2(params, (err, data) => {
|
||||
if (err) {
|
||||
console.error(err);
|
||||
return reject(err);
|
||||
}
|
||||
return resolve(data);
|
||||
});
|
||||
});
|
||||
}
|
||||
async update() {
|
||||
if (!this.writeable)
|
||||
return false;
|
||||
}
|
||||
}
|
||||
module.exports.Files = Files;
|
||||
//# sourceMappingURL=index.js.map
|
File diff suppressed because one or more lines are too long
|
@ -0,0 +1,492 @@
|
|||
'use strict';
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.Files3 = void 0;
|
||||
require("dotenv/config");
|
||||
const uuid_1 = require("uuid");
|
||||
const path_1 = require("path");
|
||||
const promises_1 = require("fs/promises");
|
||||
const fs_1 = require("fs");
|
||||
const crypto_1 = require("crypto");
|
||||
const mime = __importStar(require("mime-types"));
|
||||
const aws_sdk_1 = require("aws-sdk");
|
||||
const os_1 = require("os");
|
||||
const s3Stream = require("s3-upload-stream");
|
||||
const log_1 = require("../log");
|
||||
const env_1 = require("../env");
|
||||
const TMP_DIR = ((0, env_1.envString)('FILE3_DIR', null) !== null) ? (0, env_1.envString)('FILE3_DIR', '/tmp') : (0, env_1.envString)('TMP_DIR', (0, os_1.tmpdir)());
|
||||
class Files3 {
|
||||
/**
|
||||
* @constructor
|
||||
*
|
||||
*/
|
||||
constructor(bucket, writeable = true) {
|
||||
this.writeable = false;
|
||||
const S3_ENDPOINT = (0, env_1.envString)('S3_ENDPOINT', 'http://127.0.0.1:9000');
|
||||
const spacesEndpoint = new aws_sdk_1.Endpoint(S3_ENDPOINT);
|
||||
const s3Config = {
|
||||
accessKeyId: (0, env_1.envString)('S3_ACCESS_KEY', 'YOUR-ACCESSKEYID'),
|
||||
secretAccessKey: (0, env_1.envString)('S3_ACCESS_SECRET', 'YOUR-SECRETACCESSKEY'),
|
||||
endpoint: spacesEndpoint,
|
||||
signatureVersion: 'v4'
|
||||
};
|
||||
this.endpoint = S3_ENDPOINT;
|
||||
this.s3 = new aws_sdk_1.S3(s3Config);
|
||||
this.s3Stream = s3Stream(this.s3);
|
||||
this.log = (0, log_1.createLog)('files3');
|
||||
this.bucket = bucket;
|
||||
this.writeable = writeable;
|
||||
}
|
||||
/**
|
||||
* Create a SHA256 hash of any data provided.
|
||||
**/
|
||||
hash(data) {
|
||||
return (0, crypto_1.createHash)('sha256').update(data).digest('base64');
|
||||
}
|
||||
/**
|
||||
* Check if file exists
|
||||
**/
|
||||
async exists(path) {
|
||||
try {
|
||||
await (0, promises_1.access)(path);
|
||||
return true;
|
||||
}
|
||||
catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Read file from disk as buffer and create hash of the data.
|
||||
**/
|
||||
async hashFile(filePath) {
|
||||
let data;
|
||||
try {
|
||||
data = await (0, promises_1.readFile)(filePath);
|
||||
}
|
||||
catch (err) {
|
||||
this.log.error(err);
|
||||
}
|
||||
return this.hash(data);
|
||||
}
|
||||
/**
|
||||
* create a file object on an S3 bucket and upload data.
|
||||
* Reads into memory
|
||||
**/
|
||||
async create(file, keyName) {
|
||||
if (!this.writeable)
|
||||
return false;
|
||||
const id = await this.hashFile(file);
|
||||
const ext = mime.extension(file.mimetype);
|
||||
const key = typeof keyName !== 'undefined' ? keyName : `${id}.${ext}`;
|
||||
const webPath = (0, path_1.join)('/files/', this.bucket, key);
|
||||
const publicPath = (0, path_1.join)(`${this.bucket}.${this.endpoint}`, key);
|
||||
const record = {
|
||||
id,
|
||||
created: +new Date(),
|
||||
name: file.originalname,
|
||||
public: publicPath,
|
||||
path: webPath,
|
||||
path_hash: this.hash(webPath),
|
||||
hash: null,
|
||||
type: file.mimetype,
|
||||
size: null
|
||||
};
|
||||
const params = {
|
||||
Bucket: this.bucket,
|
||||
Key: key,
|
||||
Body: null
|
||||
};
|
||||
params.Body = file.buffer;
|
||||
record.hash = this.hash(file.buffer);
|
||||
record.size = file.buffer.byteLength;
|
||||
return new Promise((resolve, reject) => {
|
||||
return this.s3.putObject(params, (err, data) => {
|
||||
if (err) {
|
||||
this.log.error(err);
|
||||
return reject(err);
|
||||
}
|
||||
else {
|
||||
this.log.info(`Saved file ${record.path}`);
|
||||
return resolve(record);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
/*
|
||||
* Create an s3 record using only a path reference of a local file
|
||||
* Reads into memory
|
||||
*/
|
||||
async createFromPath(filePath, keyName) {
|
||||
const filename = (0, path_1.basename)(filePath);
|
||||
const mimetype = mime.lookup(filePath);
|
||||
const key = typeof keyName !== 'undefined' ? keyName : null;
|
||||
let file;
|
||||
let buffer;
|
||||
try {
|
||||
buffer = await (0, promises_1.readFile)(filePath);
|
||||
}
|
||||
catch (err) {
|
||||
this.log.error('createFromPath', err);
|
||||
}
|
||||
file = {
|
||||
buffer,
|
||||
mimetype,
|
||||
originalname: filename
|
||||
};
|
||||
return this.create(file, key);
|
||||
}
|
||||
/**
|
||||
* Create an s3 record from a stream
|
||||
* Pass "createReadStream('')" object
|
||||
**/
|
||||
/*public async createStream (file : any) {
|
||||
if (!this.writeable) return false;
|
||||
const id : string = uuid();
|
||||
const ext : string | false = mime.extension(file.mimetype);
|
||||
const key : string = `${id}.${ext}`;
|
||||
const webPath : string = pathJoin('/files/', this.bucket, key);
|
||||
const publicPath : string = pathJoin(`${this.bucket}.${this.endpoint}`, key);
|
||||
const record : FileRecord = {
|
||||
id,
|
||||
created : +new Date(),
|
||||
name : file.originalname,
|
||||
public : publicPath,
|
||||
path : webPath,
|
||||
path_hash : this.hash(webPath),
|
||||
hash : null,
|
||||
type : file.mimetype,
|
||||
size : null
|
||||
};
|
||||
const params : S3Params = {
|
||||
Bucket: this.bucket,
|
||||
Key: key
|
||||
};
|
||||
const upload = this.s3Stream.upload(params);
|
||||
|
||||
upload.maxPartSize(20971520); // 20 MB
|
||||
upload.concurrentParts(5);
|
||||
|
||||
return new Promise((resolve : Function, reject : Function) => {
|
||||
upload.on('error', (err : Error) => {
|
||||
return reject(err);
|
||||
});
|
||||
upload.on('part', (details : any) => {
|
||||
this.log.info(`${details.ETag} - part: ${details.PartNumber} received: ${details.receivedSize} uploaded: ${details.uploadedSize}`)
|
||||
});
|
||||
upload.on('uploaded', (details : any) => {
|
||||
record.hash = details.ETag;
|
||||
record.size = details.uploadedSize;
|
||||
this.log.info(`Saved file ${record.path}`);
|
||||
return resolve(record);
|
||||
});
|
||||
this.log.info(`Streaming ${record.path} to S3`);
|
||||
stream.pipe(upload);
|
||||
});
|
||||
}*/
|
||||
/**
|
||||
* Create a stream . Bind to busboy.on('file', files3.createStream)
|
||||
* ex. (express POST route callback)
|
||||
* var busboy = new Busboy({ headers: req.headers });
|
||||
* busboy.on('file', files3.createStream)
|
||||
* req.pipe(busboy);
|
||||
**/
|
||||
/* public async createStreamExpress (fieldname : string, file : any, filename : string, encoding : any, mimetype : string) {
|
||||
if (!this.writeable) return false;
|
||||
const id : string = uuid();
|
||||
const ext : string | false = mime.extension(mimetype);
|
||||
const key : string = `${id}.${ext}`;
|
||||
const webPath : string = pathJoin('/files/', this.bucket, key);
|
||||
const publicPath : string = pathJoin(`${this.bucket}.${this.endpoint}`, key);
|
||||
const record : FileRecord = {
|
||||
id,
|
||||
created : +new Date(),
|
||||
name : filename,
|
||||
public : publicPath,
|
||||
path : webPath,
|
||||
path_hash : this.hash(webPath),
|
||||
hash : null,
|
||||
type : file.mimetype,
|
||||
size : null
|
||||
};
|
||||
const params : S3Params = {
|
||||
Bucket: this.bucket,
|
||||
Key: key
|
||||
};
|
||||
const upload = this.s3Stream.upload(params);
|
||||
|
||||
upload.maxPartSize(20971520); // 20 MB
|
||||
upload.concurrentParts(5);
|
||||
|
||||
return new Promise((resolve : Function, reject : Function) => {
|
||||
var s3 = new AWS.S3({
|
||||
params: {Bucket: 'sswa', Key: filename, Body: file},
|
||||
options: {partSize: 5 * 1024 * 1024, queueSize: 10} // 5 MB
|
||||
});
|
||||
s3.upload().on('httpUploadProgress', function (evt) {
|
||||
this.log.info(evt);
|
||||
}).send(function (err, data) {
|
||||
s3UploadFinishTime = new Date();
|
||||
if(busboyFinishTime && s3UploadFinishTime) {
|
||||
res.json({
|
||||
uploadStartTime: uploadStartTime,
|
||||
busboyFinishTime: busboyFinishTime,
|
||||
s3UploadFinishTime: s3UploadFinishTime
|
||||
});
|
||||
}
|
||||
this.log.info(err, data);
|
||||
});
|
||||
file.on('data', ( data : any ) => {
|
||||
upload.on('error', (err : Error) => {
|
||||
return reject(err);
|
||||
});
|
||||
upload.on('part', (details : any) => {
|
||||
this.log.info(`${details.ETag} - part: ${details.PartNumber} received: ${details.receivedSize} uploaded: ${details.uploadedSize}`)
|
||||
});
|
||||
upload.on('uploaded', (details : any) => {
|
||||
record.hash = details.ETag;
|
||||
record.size = details.uploadedSize;
|
||||
this.log.info(`Saved file ${record.path}`)
|
||||
return resolve(record)
|
||||
});
|
||||
data.pipe(upload);
|
||||
})
|
||||
});
|
||||
}*/
|
||||
/**
|
||||
* Create a stream from a path on the local device
|
||||
*
|
||||
* @param {string} filePath Path to file
|
||||
* @param {string} keyName (optional) Predefined key
|
||||
**/
|
||||
async createStreamFromPath(filePath, keyName) {
|
||||
if (!this.writeable)
|
||||
return false;
|
||||
const id = (0, uuid_1.v4)();
|
||||
const fileName = (0, path_1.basename)(filePath);
|
||||
const mimetype = mime.lookup(filePath);
|
||||
const ext = mime.extension(mimetype);
|
||||
const key = typeof keyName !== 'undefined' ? keyName : `${id}.${ext}`;
|
||||
const webPath = (0, path_1.join)('/files/', this.bucket, key);
|
||||
const publicPath = (0, path_1.join)(`${this.bucket}.${this.endpoint}`, key);
|
||||
const record = {
|
||||
id,
|
||||
created: +new Date(),
|
||||
name: fileName,
|
||||
public: publicPath,
|
||||
path: webPath,
|
||||
path_hash: this.hash(webPath),
|
||||
hash: null,
|
||||
type: mimetype ? mimetype : null,
|
||||
size: null
|
||||
};
|
||||
const params = {
|
||||
Bucket: this.bucket,
|
||||
Key: key
|
||||
};
|
||||
const upload = this.s3Stream.upload(params);
|
||||
const stream = (0, fs_1.createReadStream)(filePath);
|
||||
upload.maxPartSize(20971520); // 20 MB
|
||||
upload.concurrentParts(5);
|
||||
return new Promise((resolve, reject) => {
|
||||
upload.on('error', (err) => {
|
||||
return reject(err);
|
||||
});
|
||||
upload.on('part', (details) => {
|
||||
this.log.info(`${details.ETag} - part: ${details.PartNumber} received: ${details.receivedSize} uploaded: ${details.uploadedSize}`);
|
||||
});
|
||||
upload.on('uploaded', (details) => {
|
||||
record.hash = details.ETag;
|
||||
record.size = details.uploadedSize;
|
||||
this.log.info(`Saved file ${record.path}`);
|
||||
return resolve(record);
|
||||
});
|
||||
this.log.info(`Streaming ${record.path} to S3`);
|
||||
stream.pipe(upload);
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Read a file from S3 using a key
|
||||
*
|
||||
* @param {string} key File key
|
||||
*
|
||||
* @returns {string} File data
|
||||
**/
|
||||
async read(key) {
|
||||
const params = {
|
||||
Bucket: this.bucket,
|
||||
Key: key
|
||||
};
|
||||
return new Promise((resolve, reject) => {
|
||||
return this.s3.getObject(params, (err, data) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
return resolve(data.Body); //buffer
|
||||
});
|
||||
});
|
||||
}
|
||||
/*
|
||||
const xhr = new XMLHttpRequest();
|
||||
xhr.open('PUT', signedUrl);
|
||||
xhr.setRequestHeader('Content-Type', file.type);
|
||||
xhr.setRequestHeader('x-amz-acl', 'public-read');
|
||||
xhr.send(file);
|
||||
*/
|
||||
/**
|
||||
* Get a signed put key for writing
|
||||
*
|
||||
* @param {string} key Key that file will be located at
|
||||
* @param {string} fileType Mimetype of file
|
||||
*
|
||||
* @returns {string} Url of signed key
|
||||
**/
|
||||
async signedPutKey(key, fileType) {
|
||||
const s3Params = {
|
||||
Bucket: this.bucket,
|
||||
Key: key,
|
||||
ContentType: fileType,
|
||||
Expires: new Date((new Date()).getTime() + 24 * 60 * 60 * 1000) //1 day
|
||||
//ACL: 'public-read',
|
||||
};
|
||||
return new Promise((resolve, reject) => {
|
||||
return this.s3.getSignedUrl('putObject', s3Params, (err, url) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
return resolve(url);
|
||||
});
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Get a signed read key for reading
|
||||
*
|
||||
* @param {string} key Key that file will be located at
|
||||
*
|
||||
* @returns {string} Url of signed key
|
||||
**/
|
||||
async signedGetKey(key) {
|
||||
const s3Params = {
|
||||
Bucket: this.bucket,
|
||||
Key: key,
|
||||
//Expires: new Date((new Date()).getTime() + 24 * 60 * 60 * 1000) //1 day
|
||||
//ACL: 'public-read',
|
||||
//Expires: 60 ?
|
||||
};
|
||||
return new Promise((resolve, reject) => {
|
||||
return this.s3.getSignedUrl('getObject', s3Params, (err, url) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
return resolve(url);
|
||||
});
|
||||
});
|
||||
}
|
||||
/*
|
||||
readStream (to express or server)
|
||||
s3.getObject(params)
|
||||
.on('httpHeaders', function (statusCode, headers) {
|
||||
res.set('Content-Length', headers['content-length']);
|
||||
res.set('Content-Type', headers['content-type']);
|
||||
this.response.httpResponse.createUnbufferedStream()
|
||||
.pipe(res);
|
||||
})
|
||||
.send();
|
||||
--------
|
||||
var fileStream = fs.createWriteStream('/path/to/file.jpg');
|
||||
var s3Stream = s3.getObject({Bucket: 'myBucket', Key: 'myImageFile.jpg'}).createReadStream();
|
||||
|
||||
// Listen for errors returned by the service
|
||||
s3Stream.on('error', function(err) {
|
||||
// NoSuchKey: The specified key does not exist
|
||||
this.log.error(err);
|
||||
});
|
||||
|
||||
s3Stream.pipe(fileStream).on('error', function(err) {
|
||||
// capture any errors that occur when writing data to the file
|
||||
this.log.error('File Stream:', err);
|
||||
}).on('close', function() {
|
||||
this.log.info('Done.');
|
||||
});
|
||||
*/
|
||||
/**
|
||||
* Delete an object at a specific key
|
||||
*
|
||||
* @param {string} key Key for object
|
||||
*
|
||||
* @returns {boolean} True if successful
|
||||
**/
|
||||
async delete(key) {
|
||||
if (!this.writeable)
|
||||
return false;
|
||||
const params = {
|
||||
Bucket: this.bucket,
|
||||
Key: key
|
||||
};
|
||||
return new Promise((resolve, reject) => {
|
||||
return this.s3.deleteObject(params, (err, data) => {
|
||||
if (err) {
|
||||
this.log.error(err);
|
||||
return reject(err);
|
||||
}
|
||||
return resolve(true); //buffer
|
||||
});
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Lists all objects with a specific prefix
|
||||
**/
|
||||
async list(prefix) {
|
||||
const params = {
|
||||
Bucket: this.bucket,
|
||||
Prefix: prefix
|
||||
};
|
||||
return new Promise((resolve, reject) => {
|
||||
return this.s3.listObjectsV2(params, (err, data) => {
|
||||
if (err) {
|
||||
this.log.error(err);
|
||||
return reject(err);
|
||||
}
|
||||
return resolve(data);
|
||||
});
|
||||
});
|
||||
}
|
||||
async update() {
|
||||
if (!this.writeable)
|
||||
return false;
|
||||
}
|
||||
}
|
||||
exports.Files3 = Files3;
|
||||
module.exports = { Files3 };
|
||||
//# sourceMappingURL=index.js.map
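A hedged usage sketch of the Files3 class above: the bucket name comes from S3_BUCKET as in generate.js, while the local path and object key are illustrative.

import { Files3 } from './files3';
import { envString } from './env';

async function uploadOne () {
    const files = new Files3(envString('S3_BUCKET', 'my-photo-bucket'), true);
    // Streams the local file through s3-upload-stream and resolves with a FileRecord.
    const record : any = await files.createStreamFromPath('/tmp/example.jpg');
    console.log(`uploaded ${record.name} to ${record.public}`);
    // Time-limited read URL for a known object key.
    const url = await files.signedGetKey('example.jpg');
    console.log(url);
}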
|
File diff suppressed because one or more lines are too long
|
@ -1,23 +1,69 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
require("dotenv/config");
|
||||
const log_1 = require("./log");
|
||||
const promises_1 = require("fs/promises");
|
||||
const path_1 = require("path");
|
||||
const sizeOf = __importStar(require("image-size"));
|
||||
const shell_1 = require("./shell");
|
||||
const files3_1 = require("./files3");
|
||||
const env_1 = require("./env");
|
||||
const db_1 = require("./db");
|
||||
class Generate {
|
||||
constructor() {
|
||||
this.inbox = typeof process.env.INBOX !== 'undefined' ? process.env.INBOX : '~/Photos/toprocess';
|
||||
this.inbox = (0, env_1.envString)('INBOX', '~/Photos/toprocess');
|
||||
this.photos = (0, env_1.envString)('PHOTOS', '~/Photos/processed');
|
||||
this.log = (0, log_1.createLog)('generate');
|
||||
this.log.info(`Generating site: ${new Date()}`);
|
||||
this.db = new db_1.DB();
|
||||
this.s3 = new files3_1.Files3((0, env_1.envString)('S3_BUCKET', 'mmcwilliamsphotos'), true);
|
||||
this.generate();
|
||||
}
|
||||
async generate() {
|
||||
//check version
|
||||
//sync
|
||||
await this.checkInbox();
|
||||
//validate
|
||||
}
|
||||
async checkInbox() {
|
||||
let inbox;
|
||||
let images;
|
||||
let filename;
|
||||
let meta;
|
||||
let dimensions;
|
||||
try {
|
||||
inbox = await (0, promises_1.realpath)(this.inbox);
|
||||
}
|
||||
|
@ -32,7 +78,7 @@ class Generate {
|
|||
this.log.error(err);
|
||||
return;
|
||||
}
|
||||
images = images.filter(el => {
|
||||
images = images.filter((el) => {
|
||||
if (el.toLowerCase().indexOf('.jpg') !== -1
|
||||
|| el.toLowerCase().indexOf('.jpeg') !== -1
|
||||
|| el.toLowerCase().indexOf('.tif') !== -1
|
||||
|
@ -45,11 +91,21 @@ class Generate {
|
|||
this.log.info(`No new images found`);
|
||||
return;
|
||||
}
|
||||
images = images.map(el => (0, path_1.join)(inbox, el));
|
||||
console.dir(images);
|
||||
images = await Promise.all(images.map(async (el) => {
|
||||
return await (0, promises_1.realpath)((0, path_1.join)(inbox, el));
|
||||
}));
|
||||
for (let image of images) {
|
||||
this.log.info(image);
|
||||
filename = (0, path_1.basename)(image);
|
||||
meta = this.parseFilename(filename);
|
||||
dimensions = await this.getImageDimensions(image);
|
||||
meta.width = dimensions.width;
|
||||
meta.height = dimensions.height;
|
||||
console.dir(meta);
|
||||
}
|
||||
}
|
||||
async img(file) {
|
||||
const cmd = ['bash', 'scripts/img.sh', file];
|
||||
async img(file, exif) {
|
||||
const cmd = ['bash', 'scripts/img.sh', file, exif];
|
||||
const shell = new shell_1.Shell(cmd);
|
||||
try {
|
||||
await shell.execute();
|
||||
|
@ -60,6 +116,65 @@ class Generate {
|
|||
}
|
||||
this.log.info(`Processed image file for ${file}`);
|
||||
}
|
||||
async getImageDimensions(imagePath) {
|
||||
let dimensions;
|
||||
try {
|
||||
dimensions = await sizeOf.imageSize(imagePath);
|
||||
return dimensions;
|
||||
}
|
||||
catch (err) {
|
||||
this.log.error('Error getting image dimensions:', err);
|
||||
}
|
||||
}
|
||||
capitalize(str) {
|
||||
return (str.substring(0, 1).toUpperCase()) + str.substring(1);
|
||||
}
|
||||
formatProperNouns(str) {
|
||||
let parts = str.split('-');
|
||||
parts = parts.map(el => this.capitalize(el));
|
||||
return parts.join(' ');
|
||||
}
|
||||
//year
|
||||
//month
|
||||
//day
|
||||
//format
|
||||
//filmstock
|
||||
//location
|
||||
//description
|
||||
//original
|
||||
//2024_12_02_35mm_Kodak-Gold-200_Somerville-MA_Walk-with-Charlie#000061280009.tif
|
||||
parseFilename(filename) {
|
||||
const halves = filename.split('#');
|
||||
const parts = halves[0].split('_');
|
||||
let meta = {};
|
||||
for (let i = 0; i < parts.length; i++) {
|
||||
switch (i) {
|
||||
case 0:
|
||||
meta.year = parseInt(parts[i]);
|
||||
break;
|
||||
case 1:
|
||||
meta.month = parseInt(parts[i]);
|
||||
break;
|
||||
case 2:
|
||||
meta.day = parseInt(parts[i]);
|
||||
break;
|
||||
case 3:
|
||||
meta.format = parts[i];
|
||||
break;
|
||||
case 4:
|
||||
meta.filmstock = parts[i].split('-').join(' ');
|
||||
break;
|
||||
case 5:
|
||||
meta.location = parts[i].split('-').join(' ');
|
||||
break;
|
||||
case 6:
|
||||
meta.description = parts[i].split('-').join(' ');
|
||||
break;
|
||||
}
|
||||
}
|
||||
meta.original = halves[1];
|
||||
return meta;
|
||||
}
|
||||
}
|
||||
new Generate();
|
||||
//# sourceMappingURL=generate.js.map
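For the sample filename documented in the comments above, parseFilename (splitting the first half on underscores) would produce roughly the following; this is an illustrative expectation, not captured output.

const sample : string = '2024_12_02_35mm_Kodak-Gold-200_Somerville-MA_Walk-with-Charlie#000061280009.tif';
// parseFilename(sample) maps the underscore-separated fields in order:
const expected = {
    year: 2024,
    month: 12,
    day: 2,
    format: '35mm',
    filmstock: 'Kodak Gold 200',
    location: 'Somerville MA',
    description: 'Walk with Charlie',
    original: '000061280009.tif' // everything after the '#'
};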
|
|
@ -1 +1 @@
|
|||
{"version":3,"file":"generate.js","sourceRoot":"","sources":["../src/generate.ts"],"names":[],"mappings":";;AAAA,yBAAuB;AACvB,+BAAkC;AAGlC,0CAA0D;AAC1D,+BAA4B;AAC5B,mCAAgC;AAGhC,MAAM,QAAQ;IAKb;QAFQ,UAAK,GAAY,OAAO,OAAO,CAAC,GAAG,CAAC,KAAK,KAAK,WAAW,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,oBAAoB,CAAC;QAG5G,IAAI,CAAC,GAAG,GAAG,IAAA,eAAS,EAAC,UAAU,CAAC,CAAC;QACjC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,oBAAoB,IAAI,IAAI,EAAE,EAAE,CAAC,CAAC;QAChD,IAAI,CAAC,QAAQ,EAAE,CAAC;IAEjB,CAAC;IAEO,KAAK,CAAC,QAAQ;QACrB,MAAM,IAAI,CAAC,UAAU,EAAE,CAAC;IACzB,CAAC;IAEO,KAAK,CAAC,UAAU;QACvB,IAAI,KAAc,CAAC;QACnB,IAAI,MAAiB,CAAC;QAEtB,IAAI,CAAC;YACJ,KAAK,GAAG,MAAM,IAAA,mBAAQ,EAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACpC,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACd,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;YACpB,OAAO;QACR,CAAC;QAED,IAAI,CAAC;YACJ,MAAM,GAAG,MAAM,IAAA,kBAAO,EAAC,KAAK,CAAC,CAAC;QAC/B,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACd,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;YACpB,OAAO;QACR,CAAC;QAED,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE;YAC3B,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;mBACvC,EAAE,CAAC,WAAW,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;mBACxC,EAAE,CAAC,WAAW,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;mBACvC,EAAE,CAAC,WAAW,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC;gBAC9C,OAAO,IAAI,CAAC;YACb,CAAC;YACD,OAAO,KAAK,CAAC;QACd,CAAC,CAAC,CAAC;QAEH,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACzB,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,qBAAqB,CAAC,CAAC;YACrC,OAAO;QACR,CAAC;QAED,MAAM,GAAG,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,CAAC,IAAA,WAAI,EAAC,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC;QAC3C,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;IACpB,CAAC;IAEO,KAAK,CAAC,GAAG,CAAE,IAAa;QAC/B,MAAM,GAAG,GAAc,CAAC,MAAM,EAAE,gBAAgB,EAAE,IAAI,CAAC,CAAC;QACxD,MAAM,KAAK,GAAW,IAAI,aAAK,CAAC,GAAG,CAAC,CAAC;QACrC,IAAI,CAAC;YACJ,MAAM,KAAK,CAAC,OAAO,EAAE,CAAC;QACvB,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACd,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;YACpB,OAAO;QACR,CAAC;QACD,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,4BAA4B,IAAI,EAAE,CAAC,CAAC;IACnD,CAAC;CACD;AAED,IAAI,QAAQ,EAAE,CAAC"}
|
||||
{"version":3,"file":"generate.js","sourceRoot":"","sources":["../src/generate.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,yBAAuB;AACvB,+BAAkC;AAElC,0CAA0D;AAC1D,+BAAsC;AACtC,mDAAqC;AAErC,mCAAgC;AAEhC,qCAAiC;AACjC,+BAAkC;AAClC,6BAA0B;AAe1B,MAAM,QAAQ;IAQb;QALQ,UAAK,GAAY,IAAA,eAAS,EAAC,OAAO,EAAE,oBAAoB,CAAC,CAAC;QAC1D,WAAM,GAAY,IAAA,eAAS,EAAC,QAAQ,EAAE,oBAAoB,CAAC,CAAC;QAKnE,IAAI,CAAC,GAAG,GAAG,IAAA,eAAS,EAAC,UAAU,CAAC,CAAC;QACjC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,oBAAoB,IAAI,IAAI,EAAE,EAAE,CAAC,CAAC;QAChD,IAAI,CAAC,EAAE,GAAG,IAAI,OAAE,EAAE,CAAC;QACnB,IAAI,CAAC,EAAE,GAAG,IAAI,eAAM,CAAC,IAAA,eAAS,EAAC,WAAW,EAAE,mBAAmB,CAAC,EAAE,IAAI,CAAC,CAAC;QACxE,IAAI,CAAC,QAAQ,EAAE,CAAC;IACjB,CAAC;IAEO,KAAK,CAAC,QAAQ;QACrB,eAAe;QAEf,MAAM;QACN,MAAM,IAAI,CAAC,UAAU,EAAE,CAAC;QACxB,UAAU;IACX,CAAC;IAEO,KAAK,CAAC,UAAU;QACvB,IAAI,KAAc,CAAC;QACnB,IAAI,MAAiB,CAAC;QACtB,IAAI,QAAiB,CAAC;QACtB,IAAI,IAAe,CAAC;QACpB,IAAI,UAAgB,CAAC;QAErB,IAAI,CAAC;YACJ,KAAK,GAAG,MAAM,IAAA,mBAAQ,EAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACpC,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACd,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;YACpB,OAAO;QACR,CAAC;QAED,IAAI,CAAC;YACJ,MAAM,GAAG,MAAM,IAAA,kBAAO,EAAC,KAAK,CAAC,CAAC;QAC/B,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACd,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;YACpB,OAAO;QACR,CAAC;QAED,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,EAAW,EAAE,EAAE;YACtC,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;mBACvC,EAAE,CAAC,WAAW,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;mBACxC,EAAE,CAAC,WAAW,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;mBACvC,EAAE,CAAC,WAAW,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC;gBAC9C,OAAO,IAAI,CAAC;YACb,CAAC;YACD,OAAO,KAAK,CAAC;QACd,CAAC,CAAC,CAAC;QAEH,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YACzB,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,qBAAqB,CAAC,CAAC;YACrC,OAAO;QACR,CAAC;QAED,MAAM,GAAG,MAAM,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,EAAE,EAAQ,EAAoB,EAAE;YACzE,OAAO,MAAM,IAAA,mBAAQ,EAAC,IAAA,WAAI,EAAC,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC;QACxC,CAAC,CAAC,CACF,CAAC;QACF,KAAK,IAAI,KAAK,IAAI,MAAM,EAAE,CAAC;YAC1B,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;YACrB,QAAQ,GAAG,IAAA,eAAQ,EAAC,KAAK,CAAC,CAAC;YAC3B,IAAI,GAAG,IAAI,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC;YACpC,UAAU,GAAG,MAAM,IAAI,CAAC,kBAAkB,CAAC,KAAK,CAAC,CAAC;YAClD,IAAI,CAAC,KAAK,GAAG,UAAU,CAAC,KAAK,CAAC;YAC9B,IAAI,CAAC,MAAM,GAAG,UAAU,CAAC,MAAM,CAAC;YAChC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAA;QAClB,CAAC;IACF,CAAC;IAEO,KAAK,CAAC,GAAG,CAAE,IAAa,EAAE,IAAa;QAC9C,MAAM,GAAG,GAAc,CAAC,MAAM,EAAE,gBAAgB,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;QAC9D,MAAM,KAAK,GAAW,IAAI,aAAK,CAAC,GAAG,CAAC,CAAC;QACrC,IAAI,CAAC;YACJ,MAAM,KAAK,CAAC,OAAO,EAAE,CAAC;QACvB,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACd,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;YACpB,OAAO;QACR,CAAC;QACD,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,4BAA4B,IAAI,EAAE,CAAC,CAAC;IACnD,CAAC;IAED,KAAK,CAAC,kBAAkB,CAAE,SAAiB;QAC1C,IAAI,UAAgB,CAAC;QACrB,IAAI,CAAC;YACJ,UAAU,GAAG,MAAM,MAAM,CAAC,SAAS,CAAC,CAAC;YACrC,OAAO,UAAU,CAAC;QACnB,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACd,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,iCAAiC,EAAE,GAAG,CAAC,CAAC;QACxD,CAAC;IACF,CAAC;IAEO,UAAU,CAAE,GAAY;QAC/B,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;IAC/D,CAAC;IAEO,iBAAiB,CAAE,GAAY;QACtC,IAAI,KAAK,GAAc,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QACtC,KAAK,GAAG,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,CAAC;QAC7C,OAAO,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IACxB,CAAC;IAED,MAAM;IACN,OAAO;IACP,KAAK;IACL,QAAQ;IACR,WAAW;IACX,UAAU;IACV,aAAa;IACb,UAAU;IAEV,iFAAiF;IAEzE,aAAa,CAAE,QAAiB;QACvC
,MAAM,MAAM,GAAc,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;QAC7C,MAAM,KAAK,GAAc,MAAM,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;QAC7C,IAAI,IAAI,GAAc,EAAE,CAAC;QACzB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;YACvC,QAAQ,CAAC,EAAE,CAAC;gBACX,KAAK,CAAC;oBACL,IAAI,CAAC,IAAI,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;oBAC/B,MAAM;gBACP,KAAK,CAAC;oBACL,IAAI,CAAC,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;oBAChC,MAAM;gBACP,KAAK,CAAC;oBACL,IAAI,CAAC,GAAG,GAAG,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;oBAC9B,MAAM;gBACP,KAAK,CAAC;oBACL,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;oBACvB,MAAM;gBACP,KAAK,CAAC;oBACL,IAAI,CAAC,SAAS,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;oBAC/C,MAAM;gBACP,KAAK,CAAC;oBACL,IAAI,CAAC,QAAQ,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;oBAC9C,MAAM;gBACP,KAAK,CAAC;oBACL,IAAI,CAAC,WAAW,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;oBACjD,MAAM;YACR,CAAC;QACF,CAAC;QACD,IAAI,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;QAC1B,OAAO,IAAI,CAAC;IACb,CAAC;CACD;AAED,IAAI,QAAQ,EAAE,CAAC"}
|
|
@ -1,6 +1,6 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.hash = void 0;
|
||||
exports.hash = hash;
|
||||
const fs_1 = require("fs");
|
||||
const crypto_1 = require("crypto");
|
||||
function hash(path) {
|
||||
|
@ -12,6 +12,5 @@ function hash(path) {
|
|||
stream.on('end', () => resolve(hashSum.digest('hex')));
|
||||
});
|
||||
}
|
||||
exports.hash = hash;
|
||||
module.exports = { hash };
|
||||
//# sourceMappingURL=index.js.map
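A small usage sketch of the streaming hash helper above; the path is illustrative and the promise resolves with the hex digest emitted on 'end'.

import { hash } from './hash';

// Streams the file through the hash and logs the resulting hex digest.
hash('data/site.db')
    .then((digest : any) => console.log(`digest: ${digest}`))
    .catch((err : Error) => console.error(err));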
|
|
@ -1 +1 @@
|
|||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/hash/index.ts"],"names":[],"mappings":";;;AAAA,2BAAsC;AACtC,mCAA0C;AAE1C,SAAgB,IAAI,CAAE,IAAa;IAClC,OAAO,IAAI,OAAO,CAAC,CAAC,OAAkB,EAAE,MAAiB,EAAE,EAAE;QAC5D,MAAM,OAAO,GAAU,IAAA,mBAAU,EAAC,QAAQ,CAAC,CAAC;QAC5C,MAAM,MAAM,GAAS,IAAA,qBAAgB,EAAC,IAAI,CAAC,CAAC;QAC5C,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAW,EAAE,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;QACjD,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,KAAc,EAAE,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;QAC7D,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IACxD,CAAC,CAAC,CAAC;AACJ,CAAC;AARD,oBAQC;AAED,MAAM,CAAC,OAAO,GAAG,EAAE,IAAI,EAAE,CAAC"}
|
||||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/hash/index.ts"],"names":[],"mappings":";;AAGA,oBAQC;AAXD,2BAAsC;AACtC,mCAA0C;AAE1C,SAAgB,IAAI,CAAE,IAAa;IAClC,OAAO,IAAI,OAAO,CAAC,CAAC,OAAkB,EAAE,MAAiB,EAAE,EAAE;QAC5D,MAAM,OAAO,GAAU,IAAA,mBAAU,EAAC,QAAQ,CAAC,CAAC;QAC5C,MAAM,MAAM,GAAS,IAAA,qBAAgB,EAAC,IAAI,CAAC,CAAC;QAC5C,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAW,EAAE,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;QACjD,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,KAAc,EAAE,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;QAC7D,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IACxD,CAAC,CAAC,CAAC;AACJ,CAAC;AAED,MAAM,CAAC,OAAO,GAAG,EAAE,IAAI,EAAE,CAAC"}
|
|
@ -1,6 +1,6 @@
|
|||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createLog = void 0;
|
||||
exports.createLog = createLog;
|
||||
/** @module log */
|
||||
/** Wrapper for winston that tags streams and optionally writes files with a simple interface. */
|
||||
/** Module now also supports optional papertrail integration, other services to follow */
|
||||
|
@ -45,6 +45,5 @@ function createLog(label, filename = null) {
|
|||
transports: tports
|
||||
});
|
||||
}
|
||||
exports.createLog = createLog;
|
||||
module.exports = { createLog };
|
||||
//# sourceMappingURL=index.js.map
|
|
@ -1 +1 @@
|
|||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/log/index.ts"],"names":[],"mappings":"AAAA,YAAY,CAAA;;;AAEZ,kBAAkB;AAClB,iGAAiG;AACjG,yFAAyF;AAEzF,qCAA2D;AAC3D,MAAM,EAAE,KAAK,EAAE,GAAG,OAAO,CAAC,aAAa,CAAC,CAAC;AACzC,MAAM,EAAE,QAAQ,EAAE,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;AAEvC,MAAM,QAAQ,GAAY,OAAO,CAAC,GAAG,CAAC,QAAQ,IAAI,SAAS,CAAC;AAE5D,IAAI,iBAAiB,CAAC;AAEtB,SAAS,YAAY,CAAE,KAAW;IAChC,IAAI,QAAQ,CAAC,KAAK,CAAC,EAAE,CAAC;QACpB,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;IAC/B,CAAC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,MAAM,GAAG,GAAG,IAAA,gBAAM,EAAC,CAAC,IAAU,EAAE,EAAE;IAC9B,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC;IAChC,MAAM,OAAO,GAAG,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAC3C,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC/C,IAAI,CAAC,OAAO,GAAG,GAAG,OAAO,IAAI,IAAI,EAAE,CAAC;IACpC,OAAO,IAAI,CAAC;AAChB,CAAC,CAAC,CAAC;AAEH,MAAM,QAAQ,GAAG,gBAAM,CAAC,MAAM,CAAC,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,KAAK,EAAE,SAAS,EAAQ,EAAE,EAAE;IAC5E,OAAO,GAAG,SAAS,KAAK,KAAK,KAAK,KAAK,KAAK,OAAO,EAAE,CAAC;AACxD,CAAC,CAAC,CAAC;AAEH;;;;;;;EAOE;AACF,SAAgB,SAAS,CAAE,KAAc,EAAE,WAAoB,IAAI;IAC/D,MAAM,MAAM,GAAW,CAAE,IAAI,CAAC,oBAAU,CAAC,OAAO,CAAC,EAAE,CAAE,CAAC;IACtD,MAAM,IAAI,GAAS,gBAAM,CAAC,OAAO,CAC7B,GAAG,EAAE,EACL,gBAAM,CAAC,KAAK,CAAC,EAAE,KAAK,EAAE,CAAC,EACvB,gBAAM,CAAC,SAAS,CAAC,EAAC,MAAM,EAAE,yBAAyB,EAAC,CAAC,EACrD,gBAAM,CAAC,QAAQ,EAAE,EACjB,QAAQ,CACX,CAAC;IACF,IAAI,cAAoB,CAAC;IAEzB,IAAI,QAAQ,KAAK,IAAI,EAAE,CAAC;QACpB,MAAM,CAAC,IAAI,CAAE,IAAI,CAAC,oBAAU,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,EAAE,CAAC,CAAE,CAAC;IACvD,CAAC;IAED,OAAO,IAAA,sBAAY,EAAC;QAChB,MAAM,EAAG,IAAI;QACb,UAAU,EAAG,MAAM;KACtB,CAAC,CAAC;AACP,CAAC;AAnBD,8BAmBC;AAED,MAAM,CAAC,OAAO,GAAG,EAAE,SAAS,EAAE,CAAC"}
|
||||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/log/index.ts"],"names":[],"mappings":"AAAA,YAAY,CAAA;;AAyCZ,8BAmBC;AA1DD,kBAAkB;AAClB,iGAAiG;AACjG,yFAAyF;AAEzF,qCAA2D;AAC3D,MAAM,EAAE,KAAK,EAAE,GAAG,OAAO,CAAC,aAAa,CAAC,CAAC;AACzC,MAAM,EAAE,QAAQ,EAAE,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;AAEvC,MAAM,QAAQ,GAAY,OAAO,CAAC,GAAG,CAAC,QAAQ,IAAI,SAAS,CAAC;AAE5D,IAAI,iBAAiB,CAAC;AAEtB,SAAS,YAAY,CAAE,KAAW;IAChC,IAAI,QAAQ,CAAC,KAAK,CAAC,EAAE,CAAC;QACpB,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;IAC/B,CAAC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,MAAM,GAAG,GAAG,IAAA,gBAAM,EAAC,CAAC,IAAU,EAAE,EAAE;IAC9B,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC;IAChC,MAAM,OAAO,GAAG,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAC3C,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC/C,IAAI,CAAC,OAAO,GAAG,GAAG,OAAO,IAAI,IAAI,EAAE,CAAC;IACpC,OAAO,IAAI,CAAC;AAChB,CAAC,CAAC,CAAC;AAEH,MAAM,QAAQ,GAAG,gBAAM,CAAC,MAAM,CAAC,CAAC,EAAE,KAAK,EAAE,OAAO,EAAE,KAAK,EAAE,SAAS,EAAQ,EAAE,EAAE;IAC5E,OAAO,GAAG,SAAS,KAAK,KAAK,KAAK,KAAK,KAAK,OAAO,EAAE,CAAC;AACxD,CAAC,CAAC,CAAC;AAEH;;;;;;;EAOE;AACF,SAAgB,SAAS,CAAE,KAAc,EAAE,WAAoB,IAAI;IAC/D,MAAM,MAAM,GAAW,CAAE,IAAI,CAAC,oBAAU,CAAC,OAAO,CAAC,EAAE,CAAE,CAAC;IACtD,MAAM,IAAI,GAAS,gBAAM,CAAC,OAAO,CAC7B,GAAG,EAAE,EACL,gBAAM,CAAC,KAAK,CAAC,EAAE,KAAK,EAAE,CAAC,EACvB,gBAAM,CAAC,SAAS,CAAC,EAAC,MAAM,EAAE,yBAAyB,EAAC,CAAC,EACrD,gBAAM,CAAC,QAAQ,EAAE,EACjB,QAAQ,CACX,CAAC;IACF,IAAI,cAAoB,CAAC;IAEzB,IAAI,QAAQ,KAAK,IAAI,EAAE,CAAC;QACpB,MAAM,CAAC,IAAI,CAAE,IAAI,CAAC,oBAAU,CAAC,IAAI,CAAC,CAAC,EAAE,QAAQ,EAAE,CAAC,CAAE,CAAC;IACvD,CAAC;IAED,OAAO,IAAA,sBAAY,EAAC;QAChB,MAAM,EAAG,IAAI;QACb,UAAU,EAAG,MAAM;KACtB,CAAC,CAAC;AACP,CAAC;AAED,MAAM,CAAC,OAAO,GAAG,EAAE,SAAS,EAAE,CAAC"}
|
File diff suppressed because it is too large
|
@ -19,19 +19,27 @@
|
|||
"devDependencies": {
|
||||
"@types/handlebars-helpers": "^0.5.6",
|
||||
"@types/lodash": "^4.14.202",
|
||||
"@types/mime-types": "^2.1.4",
|
||||
"@types/node": "^20.10.6",
|
||||
"@types/s3-upload-stream": "^1.0.7",
|
||||
"@types/sqlite3": "^3.1.11",
|
||||
"@types/triple-beam": "^1.3.5",
|
||||
"@types/uuid": "^10.0.0",
|
||||
"@types/winston": "^2.4.4",
|
||||
"typescript": "^5.3.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"@atproto/api": "^0.13.18",
|
||||
"aws-sdk": "^2.1692.0",
|
||||
"dotenv": "^16.3.1",
|
||||
"handlebars": "^4.7.8",
|
||||
"handlebars-helpers": "^0.10.0",
|
||||
"image-size": "^1.1.1",
|
||||
"lodash": "^4.17.21",
|
||||
"mime": "^4.0.1",
|
||||
"mime-types": "^2.1.35",
|
||||
"s3-cli": "^0.13.0",
|
||||
"s3-upload-stream": "^1.0.7",
|
||||
"sqlite3": "^5.1.7",
|
||||
"triple-beam": "^1.4.1",
|
||||
"uuid": "^9.0.1",
|
||||
|
|
|
@ -2,4 +2,10 @@
|
|||
|
||||
set -e
|
||||
|
||||
source .env
|
||||
|
||||
mkdir -p data
|
||||
|
||||
cat "sql/setup.sql" | sqlite3 "${DB}"
|
||||
|
||||
node dist/generate
|
|
@ -5,10 +5,12 @@ set -e
|
|||
source .env
|
||||
|
||||
INPUT="${1}"
|
||||
EXIF="${2}"
|
||||
|
||||
SIZES=(
|
||||
"home:420"
|
||||
"full:1024"
|
||||
"full:1920"
|
||||
"bsky:2000"
|
||||
)
|
||||
|
||||
function img () {
|
||||
|
@ -26,6 +28,7 @@ for sizeRaw in ${SIZES[@]}; do
|
|||
name=${name%.*}
|
||||
output="${WWW}/img/${name}_${size}.jpg"
|
||||
img "${1}" "${output}" "${size}"
|
||||
exiftool -overwrite_original -@ "${EXIF}" "${output}"
|
||||
done
|
||||
|
||||
mv "${1}" "${PHOTOS}/"
|
||||
|
|
|
@ -1,12 +1,19 @@
|
|||
CREATE TABLE IF NOT EXISTS photos {
|
||||
id TEXT PRIMARY KEY,
|
||||
CREATE TABLE IF NOT EXISTS photos (
|
||||
name TEXT UNIQUE,
|
||||
original TEXT UNIQUE,
|
||||
hash TEXT UNIQUE,
|
||||
width INTEGER,
|
||||
height INTEGER,
|
||||
filmstock TEXT,
|
||||
discovered INTEGER,
|
||||
created INTEGER,
|
||||
updated INTEGER,
|
||||
|
||||
posted INTEGER DEFAULT 0,
|
||||
score INTEGER DEFAULT 0,
|
||||
deleted INTEGER DEFAULT 0
|
||||
}
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS version {
|
||||
CREATE TABLE IF NOT EXISTS version (
|
||||
id TEXT PRIMARY KEY,
|
||||
updated INTEGER UNIQUE
|
||||
}
|
||||
)
|
|
@ -2,7 +2,7 @@ import 'dotenv/config';
|
|||
import { createLog } from './log';
|
||||
import type { Logger } from 'winston';
|
||||
import { Templates } from './templates';
|
||||
import { Database } from 'sqlite3';
|
||||
|
||||
|
||||
class Build {
|
||||
private log : Logger;
|
||||
|
|
|
@@ -0,0 +1,39 @@
import 'dotenv/config';

import { createLog } from '../log';
import type { Logger } from 'winston';
import { Database } from 'sqlite3';
import { envString } from '../env';

interface Photos {
	name : string;
	hash : string;
	width : number;
	height : number;
	discovered ?: number;
	posted? : boolean;
	score? : number;
}

export class DB {
	private db : Database;
	private log : Logger;

	constructor () {
		this.log = createLog('db');
		this.db = new Database(envString('DB', 'data/site.db'));
	}

	private async run (query : string, args : any[] = null) {
		return new Promise((resolve : Function, reject : Function) => {
			return this.db.run(query, args, (err : Error, rows : any[]) => {
				if (err) return reject(err);
				return resolve(true);
			});
		});
	}

}

module.exports = { DB };
export type { Photos };
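run() is private in this commit, so as a rough sketch of how the new photos schema gets written, here is the same promise pattern against sqlite3 directly; the column values are placeholders and assume the table created by sql/setup.sql:

// illustrative sketch only; mirrors DB.run() outside the class
import { Database } from 'sqlite3';

const db = new Database(process.env.DB || 'data/site.db');

function run (query : string, args : any[] = []) : Promise<boolean> {
	return new Promise((resolve, reject) => {
		db.run(query, args, (err : Error | null) => {
			if (err) return reject(err);
			return resolve(true);
		});
	});
}

// Insert one placeholder row using the columns added to the photos table.
run(
	'INSERT INTO photos (name, original, hash, width, height, filmstock, discovered, created, updated) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)',
	['example', 'example-0001.tif', 'abc123', 1024, 768, 'Kodak Gold 200', +new Date(), +new Date(), +new Date()]
).catch(console.error);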
@@ -0,0 +1,13 @@
export function envString (variable : string, defaultString : string) : string {
	return typeof process.env[variable] !== 'undefined' ? process.env[variable] : defaultString;
}

export function envFloat (variable : string, defaultFloat : number ) : number {
	return typeof process.env[variable] !== 'undefined' ? parseFloat(process.env[variable]) : defaultFloat;
}

export function envInt (variable : string, defaultInt : number ) : number {
	return typeof process.env[variable] !== 'undefined' ? parseInt(process.env[variable]) : defaultInt;
}

module.exports = { envString, envFloat, envInt };
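A short usage sketch of the helpers above; the fallback values are examples only and the import path depends on where the caller lives:

// illustrative usage of envString / envInt
import { envString, envInt } from './env';

const bucket : string = envString('S3_BUCKET', 'example-bucket');       // S3_BUCKET comes from .env
const endpoint : string = envString('S3_ENDPOINT', 'http://127.0.0.1:9000');
const retries : number = envInt('RETRIES', 3);                          // RETRIES is a hypothetical variable

console.log({ bucket, endpoint, retries });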
@@ -0,0 +1,504 @@
'use strict'
import 'dotenv/config';

import { v4 as uuid } from 'uuid';
import { join as pathJoin, basename } from 'path';
import { readFile, writeFile, access } from 'fs/promises';
import { createReadStream } from 'fs';
import { createHash } from 'crypto';
import * as mime from 'mime-types';
import { Endpoint, S3 } from 'aws-sdk';
import { tmpdir } from 'os';
import s3Stream = require('s3-upload-stream');

import { createLog } from '../log';
import type { Logger } from 'winston';
import { envString } from '../env';

const TMP_DIR = (envString('FILE3_DIR', null) !== null) ? envString('FILE3_DIR', '/tmp') : envString('TMP_DIR', tmpdir());

interface FileRecord {
	id : string;
	created : number;
	name : string;
	public : string;
	path : string;
	path_hash : string;
	hash : string;
	type : string;
	size : number;
}

export class Files3 {
	private writeable : boolean = false;
	private s3 : S3;
	private s3Stream : any;
	private bucket : string;
	private endpoint : string;
	private log : Logger;

	/**
	 * @constructor
	 *
	 */

	constructor (bucket : string, writeable : boolean = true) {
		const S3_ENDPOINT : string = envString('S3_ENDPOINT', 'http://127.0.0.1:9000');
		const spacesEndpoint : Endpoint = new Endpoint(S3_ENDPOINT);
		const s3Config = {
			accessKeyId: envString('S3_ACCESS_KEY', 'YOUR-ACCESSKEYID'),
			secretAccessKey: envString('S3_ACCESS_SECRET', 'YOUR-SECRETACCESSKEY'),
			endpoint: spacesEndpoint as unknown as string,
			signatureVersion: 'v4'
		};
		this.endpoint = S3_ENDPOINT;
		this.s3 = new S3(s3Config);
		this.s3Stream = s3Stream(this.s3);
		this.log = createLog('files3');

		this.bucket = bucket;
		this.writeable = writeable;
	}

	/**
	 * Create a SHA256 hash of any data provided.
	 **/
	private hash (data : any) {
		return createHash('sha256').update(data).digest('base64');
	}

	/**
	 * Check if file exists
	 **/
	private async exists (path : string) : Promise<boolean> {
		try {
			await access(path);
			return true;
		} catch {
			return false;
		}
	}

	/**
	 * Read file from disk as buffer and create hash of the data.
	 **/
	private async hashFile (filePath : string) {
		let data;
		try {
			data = await readFile(filePath);
		} catch (err) {
			this.log.error(err);
		}
		return this.hash(data);
	}
	/**
	 * Create a file object on an S3 bucket and upload data.
	 * Reads into memory
	 **/
	public async create (file : any, keyName? : string) {
		if (!this.writeable) return false;
		const id : string = await this.hashFile(file);
		const ext : string | false = mime.extension(file.mimetype);
		const key : string = typeof keyName !== 'undefined' ? keyName : `${id}.${ext}`;
		const webPath : string = pathJoin('/files/', this.bucket, key);
		const publicPath : string = pathJoin(`${this.bucket}.${this.endpoint}`, key);
		const record : FileRecord = {
			id,
			created : +new Date(),
			name : file.originalname,
			public : publicPath,
			path : webPath,
			path_hash : this.hash(webPath),
			hash : null,
			type : file.mimetype,
			size : null
		};
		const params : S3.PutObjectRequest = {
			Bucket: this.bucket,
			Key: key,
			Body: null
		};

		params.Body = file.buffer;
		record.hash = this.hash(file.buffer);
		record.size = file.buffer.byteLength;

		return new Promise((resolve : Function, reject : Function) => {
			return this.s3.putObject(params, (err : Error, data : any) => {
				if (err) {
					this.log.error(err);
					return reject(err);
				} else {
					this.log.info(`Saved file ${record.path}`);
					return resolve(record);
				}
			});
		});
	}

	/*
	 * Create an s3 record using only a path reference of a local file
	 * Reads into memory
	 */

	public async createFromPath (filePath : string, keyName? : string) {
		const filename : string = basename(filePath);
		const mimetype : string | false = mime.lookup(filePath);
		const key = typeof keyName !== 'undefined' ? keyName : null;
		let file : any;
		let buffer : any;

		try {
			buffer = await readFile(filePath);
		} catch (err) {
			this.log.error('createFromPath', err);
		}

		file = {
			buffer,
			mimetype,
			originalname : filename
		};

		return this.create(file, key);
	}

	/**
	 * Create an s3 record from a stream
	 * Pass "createReadStream('')" object
	 **/

	/*public async createStream (file : any) {
		if (!this.writeable) return false;
		const id : string = uuid();
		const ext : string | false = mime.extension(file.mimetype);
		const key : string = `${id}.${ext}`;
		const webPath : string = pathJoin('/files/', this.bucket, key);
		const publicPath : string = pathJoin(`${this.bucket}.${this.endpoint}`, key);
		const record : FileRecord = {
			id,
			created : +new Date(),
			name : file.originalname,
			public : publicPath,
			path : webPath,
			path_hash : this.hash(webPath),
			hash : null,
			type : file.mimetype,
			size : null
		};
		const params : S3Params = {
			Bucket: this.bucket,
			Key: key
		};
		const upload = this.s3Stream.upload(params);

		upload.maxPartSize(20971520); // 20 MB
		upload.concurrentParts(5);

		return new Promise((resolve : Function, reject : Function) => {
			upload.on('error', (err : Error) => {
				return reject(err);
			});
			upload.on('part', (details : any) => {
				this.log.info(`${details.ETag} - part: ${details.PartNumber} received: ${details.receivedSize} uploaded: ${details.uploadedSize}`)
			});
			upload.on('uploaded', (details : any) => {
				record.hash = details.ETag;
				record.size = details.uploadedSize;
				this.log.info(`Saved file ${record.path}`);
				return resolve(record);
			});
			this.log.info(`Streaming ${record.path} to S3`);
			stream.pipe(upload);
		});
	}*/

	/**
	 * Create a stream. Bind to busboy.on('file', files3.createStream)
	 * ex. (express POST route callback)
	 * var busboy = new Busboy({ headers: req.headers });
	 * busboy.on('file', files3.createStream)
	 * req.pipe(busboy);
	 **/

	/* public async createStreamExpress (fieldname : string, file : any, filename : string, encoding : any, mimetype : string) {
		if (!this.writeable) return false;
		const id : string = uuid();
		const ext : string | false = mime.extension(mimetype);
		const key : string = `${id}.${ext}`;
		const webPath : string = pathJoin('/files/', this.bucket, key);
		const publicPath : string = pathJoin(`${this.bucket}.${this.endpoint}`, key);
		const record : FileRecord = {
			id,
			created : +new Date(),
			name : filename,
			public : publicPath,
			path : webPath,
			path_hash : this.hash(webPath),
			hash : null,
			type : file.mimetype,
			size : null
		};
		const params : S3Params = {
			Bucket: this.bucket,
			Key: key
		};
		const upload = this.s3Stream.upload(params);

		upload.maxPartSize(20971520); // 20 MB
		upload.concurrentParts(5);

		return new Promise((resolve : Function, reject : Function) => {
			var s3 = new AWS.S3({
				params: {Bucket: 'sswa', Key: filename, Body: file},
				options: {partSize: 5 * 1024 * 1024, queueSize: 10} // 5 MB
			});
			s3.upload().on('httpUploadProgress', function (evt) {
				this.log.info(evt);
			}).send(function (err, data) {
				s3UploadFinishTime = new Date();
				if(busboyFinishTime && s3UploadFinishTime) {
					res.json({
						uploadStartTime: uploadStartTime,
						busboyFinishTime: busboyFinishTime,
						s3UploadFinishTime: s3UploadFinishTime
					});
				}
				this.log.info(err, data);
			});
			file.on('data', ( data : any ) => {
				upload.on('error', (err : Error) => {
					return reject(err);
				});
				upload.on('part', (details : any) => {
					this.log.info(`${details.ETag} - part: ${details.PartNumber} received: ${details.receivedSize} uploaded: ${details.uploadedSize}`)
				});
				upload.on('uploaded', (details : any) => {
					record.hash = details.ETag;
					record.size = details.uploadedSize;
					this.log.info(`Saved file ${record.path}`)
					return resolve(record)
				});
				data.pipe(upload);
			})
		});
	}*/

	/**
	 * Create a stream from a path on the local device
	 *
	 * @param {string} filePath Path to file
	 * @param {string} keyName (optional) Predefined key
	 **/

	public async createStreamFromPath (filePath : string, keyName?: string) {
		if (!this.writeable) return false;
		const id : string = uuid();
		const fileName : string = basename(filePath);
		const mimetype : string | false = mime.lookup(filePath);
		const ext : string | false = mimetype ? mime.extension(mimetype) : false;
		const key : string = typeof keyName !== 'undefined' ? keyName : `${id}.${ext}`;
		const webPath : string = pathJoin('/files/', this.bucket, key);
		const publicPath : string = pathJoin(`${this.bucket}.${this.endpoint}`, key);
		const record : FileRecord = {
			id,
			created : +new Date(),
			name : fileName,
			public : publicPath,
			path : webPath,
			path_hash : this.hash(webPath),
			hash : null,
			type : mimetype ? mimetype : null,
			size : null
		};
		const params : S3.PutObjectRequest = {
			Bucket: this.bucket,
			Key: key
		};
		const upload = this.s3Stream.upload(params);
		const stream = createReadStream(filePath);

		upload.maxPartSize(20971520); // 20 MB
		upload.concurrentParts(5);

		return new Promise((resolve : Function, reject : Function) => {
			upload.on('error', (err : Error) => {
				return reject(err);
			});
			upload.on('part', (details : any) => {
				this.log.info(`${details.ETag} - part: ${details.PartNumber} received: ${details.receivedSize} uploaded: ${details.uploadedSize}`)
			});
			upload.on('uploaded', (details : any) => {
				record.hash = details.ETag;
				record.size = details.uploadedSize;
				this.log.info(`Saved file ${record.path}`);
				return resolve(record);
			});
			this.log.info(`Streaming ${record.path} to S3`);
			stream.pipe(upload);
		});
	}

	/**
	 * Read a file from S3 using a key
	 *
	 * @param {string} key File key
	 *
	 * @returns {string} File data
	 **/

	public async read (key : string) {
		const params : S3.GetObjectRequest = {
			Bucket: this.bucket,
			Key: key
		};
		return new Promise((resolve : Function, reject : Function) => {
			return this.s3.getObject(params, (err : Error, data : any) => {
				if (err) {
					return reject(err);
				}
				return resolve(data.Body) //buffer
			});
		});
	}

	/*
	const xhr = new XMLHttpRequest();
	xhr.open('PUT', signedUrl);
	xhr.setRequestHeader('Content-Type', file.type);
	xhr.setRequestHeader('x-amz-acl', 'public-read');
	xhr.send(file);
	*/

	/**
	 * Get a signed put key for writing
	 *
	 * @param {string} key Key that file will be located at
	 * @param {string} fileType Mimetype of file
	 *
	 * @returns {string} Url of signed key
	 **/
	public async signedPutKey (key : string, fileType : string) {
		const s3Params : S3.PutObjectRequest = {
			Bucket: this.bucket,
			Key: key,
			ContentType: fileType,
			Expires: new Date((new Date()).getTime() + 24 * 60 * 60 * 1000) //1 day
			//ACL: 'public-read',

		};
		return new Promise((resolve : Function, reject : Function) => {
			return this.s3.getSignedUrl('putObject', s3Params, (err : Error, url : string) => {
				if (err) {
					return reject(err);
				}
				return resolve(url);
			});
		});
	}

	/**
	 * Get a signed read key for reading
	 *
	 * @param {string} key Key that file will be located at
	 *
	 * @returns {string} Url of signed key
	 **/
	public async signedGetKey (key : string) {
		const s3Params : S3.GetObjectRequest = {
			Bucket: this.bucket,
			Key: key,
			//Expires: new Date((new Date()).getTime() + 24 * 60 * 60 * 1000) //1 day
			//ACL: 'public-read',
			//Expires: 60 ?
		};
		return new Promise((resolve : Function, reject : Function) => {
			return this.s3.getSignedUrl('getObject', s3Params, (err : Error, url : string) => {
				if (err) {
					return reject(err);
				}
				return resolve(url);
			});
		});
	}

	/*
	readStream (to express or server)
	s3.getObject(params)
		.on('httpHeaders', function (statusCode, headers) {
			res.set('Content-Length', headers['content-length']);
			res.set('Content-Type', headers['content-type']);
			this.response.httpResponse.createUnbufferedStream()
				.pipe(res);
		})
		.send();
	--------
	var fileStream = fs.createWriteStream('/path/to/file.jpg');
	var s3Stream = s3.getObject({Bucket: 'myBucket', Key: 'myImageFile.jpg'}).createReadStream();

	// Listen for errors returned by the service
	s3Stream.on('error', function(err) {
		// NoSuchKey: The specified key does not exist
		this.log.error(err);
	});

	s3Stream.pipe(fileStream).on('error', function(err) {
		// capture any errors that occur when writing data to the file
		this.log.error('File Stream:', err);
	}).on('close', function() {
		this.log.info('Done.');
	});
	*/

	/**
	 * Delete an object at a specific key
	 *
	 * @param {string} key Key for object
	 *
	 * @returns {boolean} True if successful
	 **/

	public async delete (key : string) {
		if (!this.writeable) return false;
		const params : S3.DeleteObjectRequest = {
			Bucket : this.bucket,
			Key : key
		};
		return new Promise((resolve : Function, reject : Function) => {
			return this.s3.deleteObject(params, (err : Error, data : any) => {
				if (err) {
					this.log.error(err);
					return reject(err);
				}
				return resolve(true);
			});
		});
	}

	/**
	 * Lists all objects with a specific prefix
	 **/

	public async list (prefix : string) {
		const params : S3.ListObjectsV2Request = {
			Bucket: this.bucket,
			Prefix: prefix
		};
		return new Promise((resolve : Function, reject : Function) => {
			return this.s3.listObjectsV2(params, (err : Error, data : any) => {
				if (err) {
					this.log.error(err);
					return reject(err);
				}
				return resolve(data);
			});
		});
	}

	public async update () {
		if (!this.writeable) return false;
	}
}


module.exports = { Files3 };
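A rough usage sketch of the public Files3 API as committed; the bucket name, key, and file path are placeholders, and it assumes the S3_* variables from .env are configured:

// illustrative only -- exercises createStreamFromPath(), list() and signedGetKey()
import { Files3 } from './files3';

async function example () {
	const files = new Files3('example-bucket', true); // (bucket, writeable)

	// stream a local image up to S3 under an explicit key
	const record = await files.createStreamFromPath('/tmp/example.jpg', 'example.jpg');
	console.log(record);

	// list keys under a prefix, then fetch a signed GET url for one of them
	const listing = await files.list('example');
	const url = await files.signedGetKey('example.jpg');
	console.log(listing, url);
}

example().catch(console.error);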
src/generate.ts
@@ -1,26 +1,48 @@
import 'dotenv/config';
import { createLog } from './log';
import type { Logger } from 'winston';
import { Database } from 'sqlite3';
import { readFile, readdir, realpath } from 'fs/promises';
import { join } from 'path';
import { join, basename } from 'path';
import * as sizeOf from 'image-size';
import { promisify } from 'util';
import { Shell } from './shell';
import { hash } from './hash';
import { Files3 } from './files3'
import { envString } from './env';
import { DB } from './db';

interface Metadata {
	year? : number;
	month? : number;
	day? : number;
	format?: string;
	filmstock?: string;
	location? : string;
	description? : string;
	width? : number;
	height? : number;
	original?: string;
}

class Generate {
	private log : Logger;
	private files : string[];
	private inbox : string = typeof process.env.INBOX !== 'undefined' ? process.env.INBOX : '~/Photos/toprocess';
	private inbox : string = envString('INBOX', '~/Photos/toprocess');
	private photos : string = envString('PHOTOS', '~/Photos/processed');
	private s3 : Files3;
	private db : DB;

	constructor () {
		this.log = createLog('generate');
		this.log.info(`Generating site: ${new Date()}`);
		this.db = new DB();
		this.s3 = new Files3(envString('S3_BUCKET', 'mmcwilliamsphotos'), true);
		this.generate();

	}

	private async generate () {
		//check version

		//sync
		await this.checkInbox();
		//validate
@@ -29,6 +51,9 @@ class Generate {
	private async checkInbox () {
		let inbox : string;
		let images : string[];
		let filename : string;
		let meta : Metadata;
		let dimensions : any;

		try {
			inbox = await realpath(this.inbox);
@@ -44,7 +69,7 @@ class Generate {
			return;
		}

		images = images.filter(el => {
		images = images.filter((el : string) => {
			if (el.toLowerCase().indexOf('.jpg') !== -1
				|| el.toLowerCase().indexOf('.jpeg') !== -1
				|| el.toLowerCase().indexOf('.tif') !== -1
@@ -59,12 +84,23 @@ class Generate {
			return;
		}

		images = images.map(el => join(inbox, el));
		console.dir(images)
		images = await Promise.all(images.map(async (el : any) : Promise<string> => {
			return await realpath(join(inbox, el));
			})
		);
		for (let image of images) {
			this.log.info(image);
			filename = basename(image);
			meta = this.parseFilename(filename);
			dimensions = await this.getImageDimensions(image);
			meta.width = dimensions.width;
			meta.height = dimensions.height;
			console.dir(meta)
		}
	}

	private async img (file : string) {
		const cmd : string[] = ['bash', 'scripts/img.sh', file];
	private async img (file : string, exif : string) {
		const cmd : string[] = ['bash', 'scripts/img.sh', file, exif];
		const shell : Shell = new Shell(cmd);
		try {
			await shell.execute();
@@ -74,6 +110,70 @@ class Generate {
		}
		this.log.info(`Processed image file for ${file}`);
	}

	async getImageDimensions (imagePath: string): Promise<{ width: number, height: number }> {
		let dimensions : any;
		try {
			dimensions = await sizeOf(imagePath);
			return dimensions;
		} catch (err) {
			this.log.error('Error getting image dimensions:', err);
		}
	}

	private capitalize (str : string) : string {
		return (str.substring(0, 1).toUpperCase()) + str.substring(1);
	}

	private formatProperNouns (str : string) : string {
		let parts : string[] = str.split('-');
		parts = parts.map(el => this.capitalize(el));
		return parts.join(' ');
	}

	//year
	//month
	//day
	//format
	//filmstock
	//location
	//description
	//original

	//2024_12_02_35mm_Kodak-Gold-200_Somerville-MA_Walk-with-Charlie#000061280009.tif

	private parseFilename (filename : string) : Metadata {
		const halves : string[] = filename.split('#');
		const parts : string[] = halves[0].split('_');
		let meta : Metadata = {};
		for (let i = 0; i < parts.length; i++) {
			switch (i) {
				case 0 :
					meta.year = parseInt(parts[i]);
					break;
				case 1 :
					meta.month = parseInt(parts[i]);
					break;
				case 2 :
					meta.day = parseInt(parts[i]);
					break;
				case 3 :
					meta.format = parts[i];
					break;
				case 4:
					meta.filmstock = parts[i].split('-').join(' ');
					break;
				case 5 :
					meta.location = parts[i].split('-').join(' ');
					break;
				case 6 :
					meta.description = parts[i].split('-').join(' ');
					break;
			}
		}
		meta.original = halves[1];
		return meta;
	}
}

new Generate();
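A worked example (illustrative) of the mapping parseFilename() is aiming for, using the sample name from the comment above and splitting on '_' before the '#' and on '-' within each part:

// expected shape only; width and height are filled in later from getImageDimensions()
const sample = '2024_12_02_35mm_Kodak-Gold-200_Somerville-MA_Walk-with-Charlie#000061280009.tif';
// parseFilename(sample) =>
// {
//   year: 2024, month: 12, day: 2, format: '35mm',
//   filmstock: 'Kodak Gold 200', location: 'Somerville MA',
//   description: 'Walk with Charlie', original: '000061280009.tif'
// }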
@@ -1,3 +1,3 @@
<a href="{{full}}">
	<img id="{{public_id}}" src="{{home}}" loading="lazy" class="lazy" width="1024" alt="{{alt}}" title="{{title}}" />
	<img id="{{public_id}}" src="{{home}}" loading="lazy" class="lazy" width="420" alt="{{alt}}" title="{{title}}" />
</a>