2019-10-01 19:11:16 +02:00
|
|
|
const path = require('path');
|
|
|
|
const jetpack = require('fs-jetpack');
|
|
|
|
const multer = require('multer');
|
2019-10-12 07:37:09 +02:00
|
|
|
const moment = require('moment');
|
2020-07-08 03:00:12 +02:00
|
|
|
const Util = require('../../utils/Util');
|
|
|
|
const Route = require('../../structures/Route');
|
|
|
|
|
2019-10-01 19:11:16 +02:00
|
|
|
/*
	Multer middleware used by this route: buffers one incoming file in memory,
	capped at MAX_SIZE megabytes, reading the multipart field `files[]`.
*/
const upload = multer({
	storage: multer.memoryStorage(),
	limits: {
		// MAX_SIZE is configured in megabytes; multer expects bytes.
		fileSize: parseInt(process.env.MAX_SIZE, 10) * (1000 * 1000),
		files: 1
	},
	// TODO: Enable blacklisting of files/extensions
	/*
	if (options.blacklist.mimes.includes(file.mimetype)) {
		return cb(new Error(`${file.mimetype} is a blacklisted filetype.`));
	} else if (options.blacklist.extensions.some(ext => path.extname(file.originalname).toLowerCase() === ext)) {
		return cb(new Error(`${path.extname(file.originalname).toLowerCase()} is a blacklisted extension.`));
	}
	*/
	// Currently accepts everything — the blacklist above is not wired up yet.
	fileFilter: (_req, _file, done) => done(null, true)
}).array('files[]');
|
|
|
|
|
2020-05-10 17:19:10 +02:00
|
|
|
/*
|
|
|
|
TODO: If source has transparency generate a png thumbnail, otherwise a jpg.
|
|
|
|
TODO: If source is a gif, generate a thumb of the first frame and play the gif on hover on the frontend.
|
|
|
|
|
|
|
|
TODO: Think if it's worth making a folder with the user uuid in uploads/ and uploading the pictures there so
|
|
|
|
that this way at least not every single file will be in 1 directory
|
|
|
|
|
2020-07-08 03:00:12 +02:00
|
|
|
XXX: Now that the default behaviour is to serve files with node, we can actually pull this off.
|
|
|
|
Before this, having files in subfolders meant messing with nginx and the paths,
|
|
|
|
but now it should be fairly easy to re-arrange the folder structure with express.static
|
|
|
|
I see great value in this, open to suggestions.
|
2020-05-10 17:19:10 +02:00
|
|
|
*/
|
|
|
|
|
2019-10-01 19:11:16 +02:00
|
|
|
/**
 * POST /upload — accepts either a complete file or a dropzone chunk,
 * de-duplicates by content hash, and optionally attaches the file to an album.
 */
class uploadPOST extends Route {
	constructor() {
		// bypassAuth: authorization is checked manually inside run() so that
		// anonymous uploads can be allowed when PUBLIC_MODE is enabled.
		super('/upload', 'post', {
			bypassAuth: true,
			canApiKey: true
		});
	}

	/**
	 * Handles the upload request.
	 * @param {object} req Express request (multipart/form-data).
	 * @param {object} res Express response.
	 * @param {object} db Knex instance.
	 * @returns {Promise} resolves once a response has been sent.
	 */
	async run(req, res, db) {
		const user = await Util.isAuthorized(req);
		if (!user && process.env.PUBLIC_MODE === 'false') return res.status(401).json({ message: 'Not authorized to use this resource' });

		// The album id may arrive either in the body or as a header.
		const albumId = req.body.albumid || req.headers.albumid;
		if (albumId && !user) return res.status(401).json({ message: 'Only registered users can upload files to an album' });
		if (albumId && user) {
			const album = await db.table('albums').where({ id: albumId, userId: user.id }).first();
			if (!album) return res.status(401).json({ message: 'Album doesn\'t exist or it doesn\'t belong to the user' });
		}

		return upload(req, res, async err => {
			/*
				Bail out on multer errors (e.g. the file exceeded the size limit).
				Previously the error was only logged and execution continued,
				which crashed below on the then-missing req.files.
			*/
			if (err) {
				console.error(err.message);
				return res.status(400).json({ message: err.message || 'There was an error uploading the file.' });
			}
			if (!req.files || !req.files.length) return res.status(400).json({ message: 'No files were uploaded.' });

			let uploadedFile = {};
			let insertedId;

			// eslint-disable-next-line no-underscore-dangle
			const remappedKeys = this._remapKeys(req.body);
			const file = req.files[0];

			const ext = path.extname(file.originalname);
			const hash = Util.generateFileHash(file.buffer);
			const filename = Util.getUniqueFilename(file.originalname);

			/*
				First get the hash of the file. This is used to check whether the file
				has already been uploaded by either this user or an anonymous user.
				If so, instead of storing an extra copy we return the url of the
				file that is already saved (and drop the freshly reserved filename).
			*/
			const exists = await this.checkIfFileExists(db, user, hash);
			if (exists) return this.fileExists(res, exists, filename);

			if (remappedKeys && remappedKeys.uuid) {
				// Chunked upload: persist the chunk under chunks/<uuid>/NNN<ext>,
				// zero-padding the index so the chunks sort correctly for joining.
				const chunkOutput = path.join(__dirname,
					'../../../../',
					process.env.UPLOAD_FOLDER,
					'chunks',
					remappedKeys.uuid,
					`${remappedKeys.chunkindex.padStart(3, '0')}${ext || ''}`);
				await jetpack.writeAsync(chunkOutput, file.buffer);
			} else {
				// Regular upload: write the file straight into the uploads folder.
				const output = path.join(__dirname,
					'../../../../',
					process.env.UPLOAD_FOLDER,
					filename);
				await jetpack.writeAsync(output, file.buffer);
				uploadedFile = {
					name: filename,
					hash,
					size: file.buffer.length,
					url: filename
				};
			}

			// Thumbnails and the database row only happen for complete files;
			// chunks are just written to disk above.
			if (!remappedKeys || !remappedKeys.uuid) {
				Util.generateThumbnails(uploadedFile.name); // fire-and-forget, presumably intentional — TODO confirm
				insertedId = await Util.saveFileToDatabase(req, res, user, db, uploadedFile, file);
				if (!insertedId) return res.status(500).json({ message: 'There was an error saving the file.' });
				uploadedFile.deleteUrl = `${process.env.DOMAIN}/api/file/${insertedId[0]}`;

				/*
					If the upload had an album specified we make sure to create the relation
					and update the according timestamps..
				*/
				Util.saveFileToAlbum(db, albumId, insertedId);
			}

			uploadedFile = Util.constructFilePublicLink(uploadedFile);
			return res.status(201).send({
				// Typo fixed ('Sucessfully') so the message matches fileExists().
				message: 'Successfully uploaded the file.',
				...uploadedFile
			});
		});
	}

	/**
	 * Responds with the already-stored copy of a duplicate upload and deletes
	 * the file that was just written under the newly reserved filename.
	 * @returns the promise from Util.deleteFile.
	 */
	fileExists(res, exists, filename) {
		exists = Util.constructFilePublicLink(exists);
		res.json({
			message: 'Successfully uploaded the file.',
			name: exists.name,
			hash: exists.hash,
			size: exists.size,
			url: `${process.env.DOMAIN}/${exists.name}`,
			deleteUrl: `${process.env.DOMAIN}/api/file/${exists.id}`,
			repeated: true
		});

		return Util.deleteFile(filename);
	}

	/**
	 * Looks up a file with the same content hash owned by the same uploader
	 * (or owned by nobody, for anonymous uploads).
	 * @returns {Promise<object|undefined>} the matching row, if any.
	 */
	async checkIfFileExists(db, user, hash) {
		const exists = await db.table('files')
			.where(function() { // eslint-disable-line func-names
				// Grouped sub-where so the userId condition ANDs with the hash check.
				if (user) this.where('userId', user.id);
				else this.whereNull('userId');
			})
			.where({ hash })
			.first();
		return exists;
	}

	/**
	 * Strips the `dz` prefix Dropzone adds to its multipart fields
	 * (dzuuid -> uuid, dzchunkindex -> chunkindex, ...).
	 * Mutates and returns `body`; returns undefined for an empty body
	 * (callers only check truthiness, so this is safe).
	 */
	_remapKeys(body) {
		const keys = Object.keys(body);
		if (keys.length) {
			for (const key of keys) {
				if (!/^dz/.test(key)) continue;
				body[key.replace(/^dz/, '')] = body[key];
				delete body[key];
			}
			return body;
		}
	}
}
|
|
|
|
|
|
|
|
// Expose the route class; presumably picked up by the server's route loader — TODO confirm.
module.exports = uploadPOST;
|