Switch config to .env

Pitu 2019-02-19 23:52:24 +09:00
parent 2e0cbd3ea7
commit 89a271818e
21 changed files with 212 additions and 185 deletions

View File

@ -1,11 +1,20 @@
require('dotenv').config();
import autoprefixer from 'autoprefixer';
import serveStatic from 'serve-static';
import path from 'path';
import config from './config';
export default {
server: {
port: config.server.ports.frontend
port: process.env.WEBSITE_PORT
},
env: {
version: process.env.npm_package_version,
URL: process.env.DOMAIN,
baseURL: `${process.env.DOMAIN}${process.env.ROUTE_PREFIX}`,
serviceName: process.env.SERVICE_NAME,
maxFileSize: process.env.MAX_SIZE,
chunkSize: process.env.CHUNK_SIZE,
maxLinksPerAlbum: process.env.MAX_LINKS_PER_ALBUM
},
srcDir: 'src/site/',
head: {

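The Nuxt config (like the rest of the server after this commit) now pulls everything from process.env, which dotenv populates from a .env file at startup. A minimal sketch of such a file, using the variable names introduced in this commit; the values are placeholders, not the project's defaults (except where the setup wizard below suggests them):

```
# Sketch of a .env file — variable names come from this commit, values are examples only
WEBSITE_PORT=9999
SERVER_PORT=9998
DOMAIN=https://lolisafe.moe
ROUTE_PREFIX=/api
SERVICE_NAME=lolisafe
MAX_SIZE=100
CHUNK_SIZE=90
MAX_LINKS_PER_ALBUM=5
UPLOAD_FOLDER=uploads
```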
View File

@ -36,6 +36,7 @@
"chalk": "^2.4.1",
"compression": "^1.7.2",
"cors": "^2.8.4",
"dotenv": "^6.2.0",
"dumper.js": "^1.1.1",
"express": "^4.16.3",
"express-rate-limit": "^2.11.0",

View File

@ -1,6 +1,4 @@
const Route = require('../../structures/Route');
const config = require('../../../../config');
const db = require('knex')(config.server.database);
const Util = require('../../utils/Util');
const log = require('../../utils/Log');
@ -9,7 +7,7 @@ class albumDELETE extends Route {
super('/album/:id/:purge*?', 'delete');
}
async run(req, res, user) {
async run(req, res, db, user) {
const { id, purge } = req.params;
if (!id) return res.status(400).json({ message: 'Invalid album ID supplied' });

View File

@ -1,6 +1,4 @@
const Route = require('../../structures/Route');
const config = require('../../../../config');
const db = require('knex')(config.server.database);
const Util = require('../../utils/Util');
class albumGET extends Route {
@ -8,7 +6,7 @@ class albumGET extends Route {
super('/album/:identifier', 'get', { bypassAuth: true });
}
async run(req, res) {
async run(req, res, db) {
const { identifier } = req.params;
if (!identifier) return res.status(400).json({ message: 'Invalid identifier supplied' });

View File

@ -1,6 +1,4 @@
const Route = require('../../structures/Route');
const config = require('../../../../config');
const db = require('knex')(config.server.database);
const moment = require('moment');
class albumPOST extends Route {
@ -8,7 +6,7 @@ class albumPOST extends Route {
super('/album/new', 'post');
}
async run(req, res, user) {
async run(req, res, db, user) {
if (!req.body) return res.status(400).json({ message: 'No body provided' });
const { name } = req.body;
if (!name) return res.status(400).json({ message: 'No name provided' });

View File

@ -1,6 +1,4 @@
const Route = require('../../structures/Route');
const config = require('../../../../config');
const db = require('knex')(config.server.database);
const Util = require('../../utils/Util');
const log = require('../../utils/Log');
const path = require('path');
@ -11,7 +9,7 @@ class albumGET extends Route {
super('/album/:identifier/zip', 'get', { bypassAuth: true });
}
async run(req, res) {
async run(req, res, db) {
const { identifier } = req.params;
if (!identifier) return res.status(400).json({ message: 'Invalid identifier supplied' });
@ -31,7 +29,7 @@ class albumGET extends Route {
If the date when the album was zipped is greater than the album's last edit, we just send the zip to the user
*/
if (album.zippedAt > album.editedAt) {
const filePath = path.join(__dirname, '..', '..', '..', '..', config.uploads.uploadFolder, 'zips', `${album.userId}-${album.id}.zip`);
const filePath = path.join(__dirname, '..', '..', '..', '..', process.env.UPLOAD_FOLDER, 'zips', `${album.userId}-${album.id}.zip`);
const exists = await jetpack.existsAsync(filePath);
/*
Make sure the file exists just in case, and if not, continue to its generation.
@ -65,7 +63,7 @@ class albumGET extends Route {
Util.createZip(filesToZip, album);
await db.table('albums').where('id', link.albumId).update('zippedAt', db.fn.now());
const filePath = path.join(__dirname, '..', '..', '..', '..', config.uploads.uploadFolder, 'zips', `${album.userId}-${album.id}.zip`);
const filePath = path.join(__dirname, '..', '..', '..', '..', process.env.UPLOAD_FOLDER, 'zips', `${album.userId}-${album.id}.zip`);
const fileName = `lolisafe-${identifier}.zip`;
return res.download(filePath, fileName);
} catch (error) {

View File

@ -1,6 +1,4 @@
const Route = require('../../structures/Route');
const config = require('../../../../config');
const db = require('knex')(config.server.database);
const Util = require('../../utils/Util');
class albumsGET extends Route {
@ -8,7 +6,7 @@ class albumsGET extends Route {
super('/albums/mini', 'get');
}
async run(req, res, user) {
async run(req, res, db, user) {
/*
Let's fetch the albums. This route will only return a small portion
of the album files for displaying on the dashboard. It's probably useless
@ -72,7 +70,7 @@ class albumsDropdownGET extends Route {
super('/albums/dropdown', 'get');
}
async run(req, res, user) {
async run(req, res, db, user) {
const albums = await db.table('albums')
.where('userId', user.id)
.select('id', 'name');

View File

@ -1,6 +1,4 @@
const Route = require('../../../structures/Route');
const config = require('../../../../../config');
const db = require('knex')(config.server.database);
const log = require('../../../utils/Log');
class linkEditPOST extends Route {
@ -8,7 +6,7 @@ class linkEditPOST extends Route {
super('/album/link/edit', 'post');
}
async run(req, res, user) {
async run(req, res, db, user) {
if (!req.body) return res.status(400).json({ message: 'No body provided' });
const { identifier, enabled, enableDownload, expiresAt } = req.body;
if (!identifier) return res.status(400).json({ message: 'Invalid album identifier supplied' });

View File

@ -1,6 +1,4 @@
const Route = require('../../../structures/Route');
const config = require('../../../../../config');
const db = require('knex')(config.server.database);
const Util = require('../../../utils/Util');
const log = require('../../../utils/Log');
@ -9,7 +7,7 @@ class linkPOST extends Route {
super('/album/link/new', 'post');
}
async run(req, res, user) {
async run(req, res, db, user) {
if (!req.body) return res.status(400).json({ message: 'No body provided' });
const { albumId } = req.body;
if (!albumId) return res.status(400).json({ message: 'No album provided' });
@ -24,7 +22,7 @@ class linkPOST extends Route {
Count the number of links already created for that album and error out if the max was reached
*/
const count = await db.table('links').where('albumId', albumId).count({ count: 'id' });
if (count[0].count >= config.albums.maxLinksPerAlbum) return res.status(400).json({ message: 'Maximum links per album reached' });
if (count[0].count >= process.env.MAX_LINKS_PER_ALBUM) return res.status(400).json({ message: 'Maximum links per album reached' });
/*
Try to allocate a new identifier on the db

View File

@ -1,7 +1,5 @@
const Route = require('../../structures/Route');
const config = require('../../../../config');
const log = require('../../utils/Log');
const db = require('knex')(config.server.database);
const bcrypt = require('bcrypt');
const moment = require('moment');
@ -10,7 +8,7 @@ class changePasswordPOST extends Route {
super('/auth/password/change', 'post');
}
async run(req, res, user) {
async run(req, res, db, user) {
if (!req.body) return res.status(400).json({ message: 'No body provided' });
const { password, newPassword } = req.body;
if (!password || !newPassword) return res.status(401).json({ message: 'Invalid body provided' });

View File

@ -1,7 +1,5 @@
const Route = require('../../structures/Route');
const config = require('../../../../config');
const log = require('../../utils/Log');
const db = require('knex')(config.server.database);
const bcrypt = require('bcrypt');
const randomstring = require('randomstring');
const moment = require('moment');
@ -11,8 +9,8 @@ class registerPOST extends Route {
super('/auth/register', 'post', { bypassAuth: true });
}
async run(req, res) {
if (!config.enableCreateUserAccounts) return res.status(401).json({ message: 'Creation of new accounts is currently disabled' });
async run(req, res, db) {
if (!process.env.USER_ACCOUNTS) return res.status(401).json({ message: 'Creation of new accounts is currently disabled' });
if (!req.body) return res.status(400).json({ message: 'No body provided' });
const { username, password } = req.body;
if (!username || !password) return res.status(401).json({ message: 'Invalid body provided' });

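A related caveat when moving boolean switches like USER_ACCOUNTS (and PUBLIC_MODE / STRIP_EXIF further down) to the environment: process.env only holds strings, so the string 'false' is still truthy in a check like `!process.env.USER_ACCOUNTS`. A hypothetical helper that normalizes such flags; it is not part of this commit:

```js
// Hypothetical, not part of this commit: treat unset, '', '0' and 'false' as disabled.
const envFlag = name => !['false', '0', '', undefined].includes(process.env[name]);

// e.g. if (!envFlag('USER_ACCOUNTS')) return res.status(401).json({ message: 'Creation of new accounts is currently disabled' });
```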
View File

@ -1,22 +0,0 @@
const Route = require('../structures/Route');
const config = require('../../../config');
class configGET extends Route {
constructor() {
super('/config', 'get', { bypassAuth: true });
}
run(req, res) {
return res.json({
version: process.env.npm_package_version,
URL: config.filesServeLocatio,
baseURL: config.backendLocation,
serviceName: config.serviceName,
maxFileSize: config.uploads.uploadMaxSize,
chunkSize: config.uploads.chunkSize,
maxLinksPerAlbum: config.albums.maxLinksPerAlbum
});
}
}
module.exports = configGET;

View File

@ -1,6 +1,4 @@
const Route = require('../../structures/Route');
const config = require('../../../../config');
const db = require('knex')(config.server.database);
const Util = require('../../utils/Util');
const log = require('../../utils/Log');
@ -9,7 +7,7 @@ class fileDELETE extends Route {
super('/file/:id', 'delete');
}
async run(req, res, user) {
async run(req, res, db, user) {
const { id } = req.params;
if (!id) return res.status(400).json({ message: 'Invalid file ID supplied' });

View File

@ -1,6 +1,4 @@
const Route = require('../../structures/Route');
const config = require('../../../../config');
const db = require('knex')(config.server.database);
const Util = require('../../utils/Util');
class filesGET extends Route {
@ -8,7 +6,7 @@ class filesGET extends Route {
super('/files', 'get');
}
async run(req, res, user) {
async run(req, res, db, user) {
/*
Get all the files from the user
*/

View File

@ -1,8 +1,6 @@
const Route = require('../../structures/Route');
const config = require('../../../../config');
const path = require('path');
const Util = require('../../utils/Util');
const db = require('knex')(config.server.database);
const moment = require('moment');
const log = require('../../utils/Log');
const jetpack = require('fs-jetpack');
@ -22,13 +20,13 @@ class uploadPOST extends Route {
super('/upload', 'post', { bypassAuth: true });
}
async run(req, res) {
async run(req, res, db) {
const user = await Util.isAuthorized(req);
if (!user && !config.uploads.allowAnonymousUploads) return res.status(401).json({ message: 'Not authorized to use this resource' });
return this.uploadFile(req, res, user);
if (!user && !process.env.PUBLIC_MODE) return res.status(401).json({ message: 'Not authorized to use this resource' });
return this.uploadFile(req, res, db, user);
}
async processFile(req, res, user, file) {
async processFile(req, res, db, user, file) {
/*
Check if the user is trying to upload to an album
*/
@ -55,38 +53,37 @@ class uploadPOST extends Route {
We got a chunk that is not the last part, send smoke signal that we received it.
*/
return res.json({ message: 'Successfully uploaded chunk' });
} else {
/*
Seems we finally got the last part of a chunk upload
*/
const uploadsDir = path.join(__dirname, '..', '..', '..', '..', config.uploads.uploadFolder);
const chunkedFileDir = path.join(__dirname, '..', '..', '..', '..', config.uploads.uploadFolder, 'chunks', file.body.uuid);
const chunkFiles = await jetpack.findAsync(chunkedFileDir, { matching: '*' });
const originalname = Util.getFilenameFromPath(chunkFiles[0].substring(0, chunkFiles[0].lastIndexOf('.')));
}
/*
Seems we finally got the last part of a chunk upload
*/
const uploadsDir = path.join(__dirname, '..', '..', '..', '..', process.env.UPLOAD_FOLDER);
const chunkedFileDir = path.join(__dirname, '..', '..', '..', '..', process.env.UPLOAD_FOLDER, 'chunks', file.body.uuid);
const chunkFiles = await jetpack.findAsync(chunkedFileDir, { matching: '*' });
const originalname = Util.getFilenameFromPath(chunkFiles[0].substring(0, chunkFiles[0].lastIndexOf('.')));
const tempFile = {
filename: Util.getUniqueFilename(originalname),
originalname,
size: file.body.totalfilesize
};
for (const chunkFile of chunkFiles) {
try {
const data = await jetpack.readAsync(chunkFile, 'buffer'); // eslint-disable-line no-await-in-loop
await jetpack.appendAsync(path.join(uploadsDir, tempFile.filename), data); // eslint-disable-line no-await-in-loop
} catch (error) {
log.error(error);
}
}
const tempFile = {
filename: Util.getUniqueFilename(originalname),
originalname,
size: file.body.totalfilesize
};
for (const chunkFile of chunkFiles) {
try {
await jetpack.removeAsync(chunkedFileDir);
const data = await jetpack.readAsync(chunkFile, 'buffer'); // eslint-disable-line no-await-in-loop
await jetpack.appendAsync(path.join(uploadsDir, tempFile.filename), data); // eslint-disable-line no-await-in-loop
} catch (error) {
log.error(error);
}
upload = tempFile;
}
try {
await jetpack.removeAsync(chunkedFileDir);
} catch (error) {
log.error(error);
}
upload = tempFile;
}
/*
@ -109,7 +106,7 @@ class uploadPOST extends Route {
message: 'Successfully uploaded file BUT IT EXISTED ALREADY',
name: exists.name,
size: exists.size,
url: `${config.filesServeLocation}/${exists.name}`
url: `${process.env.DOMAIN}/${exists.name}`
});
return Util.deleteFile(upload.filename);
@ -147,7 +144,7 @@ class uploadPOST extends Route {
message: 'Successfully uploaded file',
name: upload.filename,
size: upload.size,
url: `${config.filesServeLocation}/${upload.filename}`
url: `${process.env.DOMAIN}/${upload.filename}`
});
/*
@ -167,7 +164,7 @@ class uploadPOST extends Route {
/*
If exif removal has been force service-wide or requested by the user, remove it
*/
if (config.uploads.forceStripExif) { // || user.settings.stripExif) {
if (process.env.STRIP_EXIF) { // || user.settings.stripExif) {
// Util.removeExif(upload.filename);
}
@ -177,11 +174,11 @@ class uploadPOST extends Route {
return Util.generateThumbnails(upload.filename);
}
uploadFile(req, res, user) {
uploadFile(req, res, db, user) {
const busboy = new Busboy({
headers: req.headers,
limits: {
fileSize: config.uploads.uploadMaxSize * (1000 * 1000),
fileSize: process.env.MAX_SIZE * (1000 * 1000),
files: 1
}
});
@ -209,7 +206,8 @@ class uploadPOST extends Route {
Hey, there's a file! Let's upload it.
*/
busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
let name, saveTo;
let name;
let saveTo;
/*
Let's check whether the file is part of a chunk upload or a standalone one.
@ -219,15 +217,15 @@ class uploadPOST extends Route {
const ext = path.extname(filename).toLowerCase();
if (Util.isExtensionBlocked(ext)) return res.status(400).json({ message: 'This extension is not allowed.' });
if (!fileToUpload.body.uuid) {
if (fileToUpload.body.uuid) {
name = `${filename}.${fileToUpload.body.chunkindex}`;
const chunkDir = path.join(__dirname, '..', '..', '..', '..', process.env.UPLOAD_FOLDER, 'chunks', fileToUpload.body.uuid);
jetpack.dir(chunkDir);
saveTo = path.join(__dirname, '..', '..', '..', '..', process.env.UPLOAD_FOLDER, 'chunks', fileToUpload.body.uuid, name);
} else {
name = Util.getUniqueFilename(filename);
if (!name) return res.status(500).json({ message: 'There was a problem allocating a filename for your upload' });
saveTo = path.join(__dirname, '..', '..', '..', '..', config.uploads.uploadFolder, name);
} else {
name = `${filename}.${fileToUpload.body.chunkindex}`;
const chunkDir = path.join(__dirname, '..', '..', '..', '..', config.uploads.uploadFolder, 'chunks', fileToUpload.body.uuid);
jetpack.dir(chunkDir);
saveTo = path.join(__dirname, '..', '..', '..', '..', config.uploads.uploadFolder, 'chunks', fileToUpload.body.uuid, name);
saveTo = path.join(__dirname, '..', '..', '..', '..', process.env.UPLOAD_FOLDER, name);
}
/*
@ -269,7 +267,7 @@ class uploadPOST extends Route {
return res.status(500).json({ message: 'There was an error uploading the file.' });
});
busboy.on('finish', () => this.processFile(req, res, user, fileToUpload));
busboy.on('finish', () => this.processFile(req, res, db, user, fileToUpload));
req.pipe(busboy);
}
}

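For reference, processFile above reads the chunk metadata from fileToUpload.body (uuid, chunkindex, totalfilesize). A rough client-side sketch of how chunks could be posted to the endpoint, assuming those field names; the real dropzone-based frontend is not shown in this diff, and the exact field set it sends (including how the last chunk is signalled) may differ:

```js
// Rough sketch, assuming the field names read by processFile (uuid, chunkindex, totalfilesize).
// The actual frontend uses dropzone and may send additional fields.
async function uploadInChunks(file, chunkSize = 90 * 1000 * 1000) {
	const uuid = crypto.randomUUID(); // any identifier unique to this upload
	const totalChunks = Math.ceil(file.size / chunkSize);
	for (let i = 0; i < totalChunks; i++) {
		const form = new FormData();
		form.append('uuid', uuid);
		form.append('chunkindex', i);
		form.append('totalfilesize', file.size);
		form.append('file', file.slice(i * chunkSize, (i + 1) * chunkSize), file.name);
		await fetch('/api/upload', { method: 'post', body: form }); // ROUTE_PREFIX defaults to /api
	}
}
```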
View File

@ -5,7 +5,7 @@ class verifyGET extends Route {
super('/verify', 'get');
}
run(req, res, user) {
run(req, res, db, user) {
const returnUser = {
id: user.id,
username: user.username,

View File

@ -1,6 +1,13 @@
const JWT = require('jsonwebtoken');
const { server } = require('../../../config');
const db = require('knex')(server.database);
const db = require('knex')({
client: process.env.DB_CLIENT,
connection: {
host: process.env.DB_HOST,
user: process.env.DB_USER,
password: process.env.DB_PASS,
database: process.env.DB_DATABASE
}
});
const moment = require('moment');
const log = require('../utils/Log');
@ -15,12 +22,12 @@ class Route {
}
authorize(req, res) {
if (this.options.bypassAuth) return this.run(req, res);
if (this.options.bypassAuth) return this.run(req, res, db);
if (!req.headers.authorization) return res.status(401).json({ message: 'No authorization header provided' });
const token = req.headers.authorization.split(' ')[1];
if (!token) return res.status(401).json({ message: 'No authorization header provided' });
return JWT.verify(token, server.secret, async (error, decoded) => {
return JWT.verify(token, process.env.SECRET, async (error, decoded) => {
if (error) {
log.error(error);
return res.status(401).json({ message: 'Your token appears to be invalid' });
@ -33,13 +40,18 @@ class Route {
if (iat && iat < moment(user.passwordEditedAt).format('x')) return res.status(401).json({ message: 'Token expired' });
if (!user.enabled) return res.status(401).json({ message: 'This account has been disabled' });
return this.run(req, res, user);
return this.run(req, res, db, user);
});
}
run(req, res, user) { // eslint-disable-line no-unused-vars
run(req, res, db) { // eslint-disable-line no-unused-vars
return;
}
error(res, error) {
log.error(error);
return res.status(500).json({ message: 'There was a problem parsing the request' });
}
}
module.exports = Route;

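Route.js now builds the knex instance itself from the DB_* variables and hands it to run(), which is why every handler above changed its signature from run(req, res, user) to run(req, res, db, user). A minimal sketch of a route written against the updated base class; the path and query are illustrative, not part of this commit:

```js
// Minimal sketch of a route using the new signature; path and query are illustrative only.
const Route = require('../structures/Route');

class filesCountGET extends Route {
	constructor() {
		super('/files/count', 'get');
	}

	async run(req, res, db, user) {
		// db is the shared knex instance created in Route.js from the DB_* env vars
		const count = await db.table('files').where('userId', user.id).count({ count: 'id' });
		return res.json({ count: count[0].count });
	}
}

module.exports = filesCountGET;
```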
View File

@ -1,4 +1,5 @@
const config = require('../../../config');
require('dotenv').config();
const log = require('../utils/Log');
const express = require('express');
const helmet = require('helmet');
@ -8,17 +9,16 @@ const bodyParser = require('body-parser');
const jetpack = require('fs-jetpack');
const path = require('path');
const Database = require('./Database');
const oneliner = require('one-liner');
const rateLimiter = new RateLimit({
windowMs: config.server.rateLimits.window,
max: config.server.rateLimits.max,
windowMs: process.env.RATE_LIMIT_WINDOW,
max: process.env.RATE_LIMIT_MAX,
delayMs: 0
});
class Server {
constructor() {
this.port = config.server.ports.backend;
this.port = process.env.SERVER_PORT;
this.server = express();
this.server.set('trust proxy', 1);
this.server.use(helmet());
@ -36,12 +36,6 @@ class Server {
// this.server.use(rateLimiter);
this.routesFolder = path.join(__dirname, '..', 'routes');
this.database = new Database();
this.server.get('/config', (req, res) => res.json({
baseURL: config.backendLocation,
serviceName: config.serviceName,
maxFileSize: config.uploads.uploadMaxSize,
chunkSize: config.uploads.chunkSize
}));
}
registerAllTheRoutes() {
@ -51,24 +45,12 @@ class Server {
if (Array.isArray(RouteClass)) routes = RouteClass;
for (const File of routes) {
const route = new File();
this.server[route.method](config.server.routePrefix + route.path, route.authorize.bind(route));
log.info(`Found route ${route.method.toUpperCase()} ${config.server.routePrefix}${route.path}`);
this.server[route.method](process.env.ROUTE_PREFIX + route.path, route.authorize.bind(route));
log.info(`Found route ${route.method.toUpperCase()} ${process.env.ROUTE_PREFIX}${route.path}`);
}
});
}
writeFrontendConfig() {
const template = oneliner`
module.exports = {
baseURL: '${config.backendLocation}',
serviceName: '${config.serviceName}',
maxFileSize: '${config.uploads.uploadMaxSize}',
chunkSize: '${config.uploads.chunkSize}'
}`;
jetpack.write(path.join(__dirname, '..', '..', 'frontend', 'config.js'), template);
log.success('Frontend config file generated successfully');
}
start() {
jetpack.dir('uploads/chunks');
jetpack.dir('uploads/thumbs/square');

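One side effect of the move: everything read from process.env is a string, so SERVER_PORT, RATE_LIMIT_WINDOW and RATE_LIMIT_MAX arrive here as strings. A hypothetical sketch of parsing them explicitly before use; the toInt helper is not part of this commit, and the fallbacks simply mirror the wizard defaults below:

```js
// Hypothetical, not part of this commit: process.env values are always strings.
const RateLimit = require('express-rate-limit');

const toInt = (value, fallback) => {
	const parsed = parseInt(value, 10);
	return Number.isNaN(parsed) ? fallback : parsed;
};

const rateLimiter = new RateLimit({
	windowMs: toInt(process.env.RATE_LIMIT_WINDOW, 2),
	max: toInt(process.env.RATE_LIMIT_MAX, 5),
	delayMs: 0
});
```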
View File

@ -1,9 +1,17 @@
const config = require('../../../config');
// const config = require('../../../config');
const jetpack = require('fs-jetpack');
const randomstring = require('randomstring');
const path = require('path');
const JWT = require('jsonwebtoken');
const db = require('knex')(config.server.database);
const db = require('knex')({
client: process.env.DB_CLIENT,
connection: {
host: process.env.DB_HOST,
user: process.env.DB_USER,
password: process.env.DB_PASS,
database: process.env.DB_DATABASE
}
});
const moment = require('moment');
const log = require('../utils/Log');
const crypto = require('crypto');
@ -13,10 +21,11 @@ const Zip = require('adm-zip');
const imageExtensions = ['.jpg', '.jpeg', '.bmp', '.gif', '.png', '.webp'];
const videoExtensions = ['.webm', '.mp4', '.wmv', '.avi', '.mov'];
const blockedExtensions = process.env.BLOCKED_EXTENSIONS.split(',');
class Util {
static isExtensionBlocked(extension) {
return config.uploads.blockedExtensions.includes(extension);
return blockedExtensions.includes(extension);
}
static generateThumbnails(filename) {
@ -36,38 +45,38 @@ class Util {
const ExifTransformer = require('exif-be-gone');
const toStream = require('buffer-to-stream');
const file = await jetpack.readAsync(path.join(__dirname, '..', '..', '..', config.uploads.uploadFolder, filename), 'buffer');
const writer = jetpack.createWriteStream(path.join(__dirname, '..', '..', '..', config.uploads.uploadFolder, `${filename}.noexif`));
const file = await jetpack.readAsync(path.join(__dirname, '..', '..', '..', process.env.UPLOAD_FOLDER, filename), 'buffer');
const writer = jetpack.createWriteStream(path.join(__dirname, '..', '..', '..', process.env.UPLOAD_FOLDER, `${filename}.noexif`));
toStream(file).pipe(new ExifTransformer()).pipe(writer);
}
*/
static async generateThumbnailForImage(filename, output) {
const file = await jetpack.readAsync(path.join(__dirname, '..', '..', '..', config.uploads.uploadFolder, filename), 'buffer');
const file = await jetpack.readAsync(path.join(__dirname, '..', '..', '..', process.env.UPLOAD_FOLDER, filename), 'buffer');
await sharp(file)
.resize(64, 64)
.toFormat('png')
.toFile(path.join(__dirname, '..', '..', '..', config.uploads.uploadFolder, 'thumbs', 'square', output));
.toFile(path.join(__dirname, '..', '..', '..', process.env.UPLOAD_FOLDER, 'thumbs', 'square', output));
await sharp(file)
.resize(225, null)
.toFormat('png')
.toFile(path.join(__dirname, '..', '..', '..', config.uploads.uploadFolder, 'thumbs', output));
.toFile(path.join(__dirname, '..', '..', '..', process.env.UPLOAD_FOLDER, 'thumbs', output));
}
static generateThumbnailForVideo(filename) {
ffmpeg(path.join(__dirname, '..', '..', '..', config.uploads.uploadFolder, filename))
ffmpeg(path.join(__dirname, '..', '..', '..', process.env.UPLOAD_FOLDER, filename))
.thumbnail({
timestamps: [0],
filename: '%b.png',
folder: path.join(__dirname, '..', '..', '..', config.uploads.uploadFolder, 'thumbs', 'square'),
folder: path.join(__dirname, '..', '..', '..', process.env.UPLOAD_FOLDER, 'thumbs', 'square'),
size: '64x64'
})
.on('error', error => log.error(error.message));
ffmpeg(path.join(__dirname, '..', '..', '..', config.uploads.uploadFolder, filename))
ffmpeg(path.join(__dirname, '..', '..', '..', process.env.UPLOAD_FOLDER, filename))
.thumbnail({
timestamps: [0],
filename: '%b.png',
folder: path.join(__dirname, '..', '..', '..', config.uploads.uploadFolder, 'thumbs'),
folder: path.join(__dirname, '..', '..', '..', process.env.UPLOAD_FOLDER, 'thumbs'),
size: '150x?'
})
.on('error', error => log.error(error.message));
@ -80,11 +89,11 @@ class Util {
}
static constructFilePublicLink(file) {
file.url = `${config.filesServeLocation}/${file.name}`;
file.url = `${process.env.DOMAIN}/${file.name}`;
const thumb = this.getFileThumbnail(file.name);
if (thumb) {
file.thumb = `${config.filesServeLocation}/thumbs/${thumb}`;
file.thumbSquare = `${config.filesServeLocation}/thumbs/square/${thumb}`;
file.thumb = `${process.env.DOMAIN}/thumbs/${thumb}`;
file.thumbSquare = `${process.env.DOMAIN}/thumbs/square/${thumb}`;
}
return file;
}
@ -92,12 +101,13 @@ class Util {
static getUniqueFilename(name) {
const retry = (i = 0) => {
const filename = randomstring.generate({
length: config.uploads.generatedFilenameLength,
length: process.env.GENERATED_FILENAME_LENGTH,
capitalization: 'lowercase'
}) + path.extname(name);
const exists = jetpack.exists(path.join(__dirname, '..', '..', '..', config.uploads.uploadFolder, filename));
const exists = jetpack.exists(path.join(__dirname, '..', '..', '..', process.env.UPLOAD_FOLDER, filename));
if (!exists) return filename;
if (i < config.uploads.retryFilenameTimes) return retry(i + 1);
if (i < 5) return retry(i + 1);
log.error('Couldn\'t allocate identifier for file');
return null;
};
return retry();
@ -106,7 +116,7 @@ class Util {
static getUniqueAlbumIdentifier() {
const retry = async (i = 0) => {
const identifier = randomstring.generate({
length: config.albums.generatedAlbumLinkLength,
length: process.env.GENERATED_ALBUM_LENGTH,
capitalization: 'lowercase'
});
const exists = await db.table('links').where({ identifier }).first();
@ -114,7 +124,7 @@ class Util {
/*
It's funny, but if you do i++ the assignment never gets done, resulting in an infinite loop
*/
if (i < config.albums.retryAlbumLinkTimes) return retry(i + 1);
if (i < 5) return retry(i + 1);
log.error('Couldn\'t allocate identifier for album');
return null;
};
@ -122,7 +132,7 @@ class Util {
}
static async getFileHash(filename) {
const file = await jetpack.readAsync(path.join(__dirname, '..', '..', '..', config.uploads.uploadFolder, filename), 'buffer');
const file = await jetpack.readAsync(path.join(__dirname, '..', '..', '..', process.env.UPLOAD_FOLDER, filename), 'buffer');
if (!file) {
log.error(`There was an error reading the file < ${filename} > for hashing`);
return null;
@ -140,9 +150,9 @@ class Util {
static async deleteFile(filename, deleteFromDB = false) {
const thumbName = this.getFileThumbnail(filename);
try {
await jetpack.removeAsync(path.join(__dirname, '..', '..', '..', config.uploads.uploadFolder, filename));
await jetpack.removeAsync(path.join(__dirname, '..', '..', '..', config.uploads.uploadFolder, 'thumbs', thumbName));
await jetpack.removeAsync(path.join(__dirname, '..', '..', '..', config.uploads.uploadFolder, 'thumbs', 'square', thumbName));
await jetpack.removeAsync(path.join(__dirname, '..', '..', '..', process.env.UPLOAD_FOLDER, filename));
await jetpack.removeAsync(path.join(__dirname, '..', '..', '..', process.env.UPLOAD_FOLDER, 'thumbs', thumbName));
await jetpack.removeAsync(path.join(__dirname, '..', '..', '..', process.env.UPLOAD_FOLDER, 'thumbs', 'square', thumbName));
if (deleteFromDB) {
await db.table('files').where('name', filename).delete();
}
@ -156,7 +166,7 @@ class Util {
try {
const files = await db.table('files').where({ albumId: id });
for (const file of files) {
await jetpack.removeAsync(path.join(__dirname, '..', '..', '..', config.uploads.uploadFolder, file));
await jetpack.removeAsync(path.join(__dirname, '..', '..', '..', process.env.UPLOAD_FOLDER, file));
}
await db.table('files').where({ albumId: id }).delete();
} catch (error) {
@ -169,7 +179,7 @@ class Util {
const token = req.headers.authorization.split(' ')[1];
if (!token) return false;
return JWT.verify(token, config.server.secret, async (error, decoded) => {
return JWT.verify(token, process.env.SECRET, async (error, decoded) => {
if (error) {
log.error(error);
return false;
@ -189,9 +199,9 @@ class Util {
try {
const zip = new Zip();
for (const file of files) {
zip.addLocalFile(path.join(__dirname, '..', '..', '..', config.uploads.uploadFolder, file));
zip.addLocalFile(path.join(__dirname, '..', '..', '..', process.env.UPLOAD_FOLDER, file));
}
zip.writeZip(path.join(__dirname, '..', '..', '..', config.uploads.uploadFolder, 'zips', `${album.userId}-${album.id}.zip`));
zip.writeZip(path.join(__dirname, '..', '..', '..', process.env.UPLOAD_FOLDER, 'zips', `${album.userId}-${album.id}.zip`));
} catch (error) {
log.error(error);
}

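Util now derives the blocked-extension list by splitting BLOCKED_EXTENSIONS on commas, so the value in .env has to be a single comma-separated string. For example (the list mirrors the wizard defaults below):

```js
// With BLOCKED_EXTENSIONS=.jar,.exe,.msi,.com,.bat,.cmd,.scr,.ps1,.sh in the .env file:
const blockedExtensions = process.env.BLOCKED_EXTENSIONS.split(',');
// -> ['.jar', '.exe', '.msi', '.com', '.bat', '.cmd', '.scr', '.ps1', '.sh']
blockedExtensions.includes('.exe'); // true
```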
View File

@ -1,3 +1,4 @@
const jetpack = require('fs-jetpack');
const qoa = require('qoa');
qoa.config({
prefix: '>',
@ -18,69 +19,117 @@ async function start() {
{
type: 'input',
query: 'Port to run the API in:',
handle: 'serverPort'
handle: 'SERVER_PORT'
},
{
type: 'input',
query: 'Port to run the Website in:',
handle: 'websitePort'
handle: 'WEBSITE_PORT'
},
{
type: 'input',
query: 'Full domain this instance is gonna be running on (Ex: https://lolisafe.moe):',
handle: 'fullDomain'
handle: 'DOMAIN'
},
{
type: 'input',
query: 'Name of the service? (Ex: lolisafe):',
handle: 'SERVICE_NAME'
},
{
type: 'input',
query: 'Maximum allowed upload file size in MB (Ex: 100):',
handle: 'maxSize'
handle: 'MAX_SIZE'
},
{
type: 'confirm',
query: 'Generate thumbnails for images/videos? (Requires ffmpeg installed and in your PATH)',
handle: 'generateThumbnails',
handle: 'GENERATE_THUMBNAILS',
accept: 'y',
deny: 'n'
},
{
type: 'confirm',
query: 'Allow users to download entire albums in ZIP format?',
handle: 'generateZips',
handle: 'GENERATE_ZIPS',
accept: 'y',
deny: 'n'
},
{
type: 'interactive',
query: 'How would you like to serve the uploaded files?',
handle: 'serveWithNode',
menu: [
'With NGINX (Faster but needs a bit more setup)',
'With node'
]
type: 'confirm',
query: 'Strip EXIF information from uploaded images if possible?',
handle: 'STRIP_EXIF',
accept: 'y',
deny: 'n'
},
{
type: 'confirm',
query: 'Run lolisafe in public mode?',
handle: 'publicMode',
query: 'Serve files with node?',
handle: 'SERVE_WITH_NODE',
accept: 'y',
deny: 'n'
},
{
type: 'input',
query: 'Base number of characters for generated file URLs (12 should be good enough):',
handle: 'GENERATED_FILENAME_LENGTH'
},
{
type: 'input',
query: 'Base number of characters for generated album URLs (6 should be enough):',
handle: 'GENERATED_ALBUM_LENGTH'
},
{
type: 'confirm',
query: 'Run lolisafe in public mode? (People will be able to upload without an account)',
handle: 'PUBLIC_MODE',
accept: 'y',
deny: 'n'
},
{
type: 'confirm',
query: 'Enable user signup for new accounts?',
handle: 'enableUserAccounts',
handle: 'USER_ACCOUNTS',
accept: 'y',
deny: 'n'
},
{
type: 'secure',
query: 'Type a secure password for the root user:',
handle: 'rootPassword'
handle: 'ROOT_PASSWORD'
}
];
const response = await qoa.prompt(wizard);
console.log(response);
let envfile = '';
const defaultSettings = {
CHUNK_SIZE: 90,
ROUTE_PREFIX: '/api',
RATE_LIMIT_WINDOW: 2,
RATE_LIMIT_MAX: 5,
DB_CLIENT: 'pg',
DB_HOST: 'localhost',
DB_USER: '',
DB_PASS: '',
DB_DATABASE: '',
BLOCKED_EXTENSIONS: ['.jar', '.exe', '.msi', '.com', '.bat', '.cmd', '.scr', '.ps1', '.sh'],
UPLOAD_FOLDER: 'uploads',
SECRET: 'SuperSecretPassphraseHere',
MAX_LINKS_PER_ALBUM: 5
};
const allSettings = Object.assign(defaultSettings, response);
const keys = Object.keys(allSettings);
for (const item of keys) {
envfile += `${item}=${allSettings[item]}\n`;
}
jetpack.write('.env', envfile);
console.log();
console.log('== .env file generated successfully. You can now run lolisafe ==');
console.log();
}
start();

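The loop above writes one KEY=value line per entry of the merged settings. Worth noting: BLOCKED_EXTENSIONS is an array in defaultSettings, and interpolating an array into a template literal joins it with commas, which happens to be exactly the form Util.isExtensionBlocked expects. A quick illustration:

```js
// Illustrative only: interpolating the array default yields the comma-separated string Util splits on.
const BLOCKED_EXTENSIONS = ['.jar', '.exe', '.msi'];
console.log(`BLOCKED_EXTENSIONS=${BLOCKED_EXTENSIONS}`);
// -> BLOCKED_EXTENSIONS=.jar,.exe,.msi
```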
View File

@ -3123,6 +3123,11 @@ dot-prop@^4.1.1:
dependencies:
is-obj "^1.0.0"
dotenv@^6.2.0:
version "6.2.0"
resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-6.2.0.tgz#941c0410535d942c8becf28d3f357dbd9d476064"
integrity sha512-HygQCKUBSFl8wKQZBSemMywRWcEDNidvNbjGVyZu3nbZ8qq9ubiPoGLMdRDpfSrpkkm9BXYFkpKxxFX38o/76w==
dropzone@^5.5.0:
version "5.5.1"
resolved "https://registry.yarnpkg.com/dropzone/-/dropzone-5.5.1.tgz#06e2f513e61d6aa363d4b556f18574f47cf7ba26"
@ -7471,6 +7476,11 @@ q@^1.1.2:
resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7"
integrity sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc=
qoa@^0.2.0:
version "0.2.0"
resolved "https://registry.yarnpkg.com/qoa/-/qoa-0.2.0.tgz#2e4ea56a388982da570bf2025733b72e2f857aab"
integrity sha512-svEO3uevNU354fUBWgMxGJH0spF29EZRe140YL20PP+5C25V+u0eMeFforSIiop2879uXgxI+IFwibHBGcseEA==
qs@6.5.2, qs@~6.5.2:
version "6.5.2"
resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36"