feature: uploader with chunks support

Pitu 2019-10-01 14:11:16 -03:00
parent a552aca8ab
commit 579e1e754a
5 changed files with 190 additions and 9 deletions

.gitignore
View File

@ -18,3 +18,4 @@ database.sqlite
uploads/
src/oldsite
.env
+!src/api/routes/uploads

View File

@ -0,0 +1,76 @@
const Route = require('../../structures/Route');
const path = require('path');
const Util = require('../../utils/Util');
const jetpack = require('fs-jetpack');
const randomstring = require('randomstring');

class uploadPOST extends Route {
	constructor() {
		super('/upload/chunks', 'post', { bypassAuth: true });
	}

	async run(req, res, db) {
		const filename = Util.getUniqueFilename(randomstring.generate(32));
		const info = {
			size: req.body.files[0].size,
			url: `${process.env.DOMAIN}/`
		};

		for (const chunk of req.body.files) {
			const { uuid } = chunk;
			// Folder where the /upload route stored this upload's chunks
			const chunkOutput = path.join(__dirname,
				'..', '..', '..', '..',
				process.env.UPLOAD_FOLDER,
				'chunks',
				uuid);
			const chunkDir = await jetpack.list(chunkOutput);
			// Every chunk carries the original extension, so grab it from the first one
			const ext = path.extname(chunkDir[0]);
			const output = path.join(__dirname,
				'..', '..', '..', '..',
				process.env.UPLOAD_FOLDER,
				`${filename}${ext || ''}`);
			// Chunk filenames are zero-padded, so a lexicographic sort restores upload order
			chunkDir.sort();

			info.name = `${filename}${ext || ''}`;
			info.url += `${filename}${ext || ''}`;

			// Append each chunk to the output file, in order
			for (let i = 0; i < chunkDir.length; i++) {
				const dir = path.join(chunkOutput, chunkDir[i]);
				const file = await jetpack.readAsync(dir, 'buffer');
				await jetpack.appendAsync(output, file);
			}
			// Remove the chunk folder once the file has been assembled
			await jetpack.removeAsync(chunkOutput);
		}

		return res.send(201, {
			message: 'Successfully merged the chunk(s).',
			...info
		});
	}
}

module.exports = uploadPOST;
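
Note on the merge above: chunkDir.sort() is a plain lexicographic sort, which only reassembles the file correctly because the /upload route below writes each chunk under a zero-padded index (000, 001, …). A quick sketch of why the padding matters:

// Zero-padded names sort correctly as strings; unpadded ones do not.
const padded = ['010.png', '002.png', '000.png', '001.png'];
padded.sort();
console.log(padded); // [ '000.png', '001.png', '002.png', '010.png' ]

const unpadded = ['10.png', '2.png', '0.png', '1.png'];
unpadded.sort();
console.log(unpadded); // [ '0.png', '1.png', '10.png', '2.png' ] -- wrong order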

View File

@ -0,0 +1,94 @@
const Route = require('../../structures/Route');
const path = require('path');
const Util = require('../../utils/Util');
const jetpack = require('fs-jetpack');
const multer = require('multer');

const upload = multer({
	storage: multer.memoryStorage(),
	limits: {
		fileSize: parseInt(process.env.MAX_SIZE, 10) * (1000 * 1000),
		files: 1
	},
	fileFilter: (req, file, cb) => {
		/*
		if (options.blacklist.mimes.includes(file.mimetype)) {
			return cb(new Error(`${file.mimetype} is a blacklisted filetype.`));
		} else if (options.blacklist.extensions.some(ext => path.extname(file.originalname).toLowerCase() === ext)) {
			return cb(new Error(`${path.extname(file.originalname).toLowerCase()} is a blacklisted extension.`));
		}
		*/
		return cb(null, true);
	}
}).array('files[]');

class uploadPOST extends Route {
	constructor() {
		super('/upload', 'post', { bypassAuth: true });
	}

	async run(req, res, db) {
		const user = await Util.isAuthorized(req);
		if (!user && process.env.PUBLIC_MODE === 'false') return res.status(401).json({ message: 'Not authorized to use this resource' });

		return upload(req, res, async err => {
			if (err) console.error(err.message);

			// Dropzone prefixes its chunk metadata fields with "dz"; strip the prefix first
			const remappedKeys = this._remapKeys(req.body);
			let uploadedFile = {};

			for (const file of req.files) {
				const ext = path.extname(file.originalname);
				const hash = Util.generateFileHash(file.buffer);
				const filename = Util.getUniqueFilename(file.originalname);

				if (remappedKeys && remappedKeys.uuid) {
					// Chunked upload: store this piece under a zero-padded index so the
					// merge route can sort the chunks back into upload order
					const chunkOutput = path.join(__dirname,
						'..', '..', '..', '..',
						process.env.UPLOAD_FOLDER,
						'chunks',
						remappedKeys.uuid,
						`${remappedKeys.chunkindex.padStart(3, '0')}${ext || ''}`);
					await jetpack.writeAsync(chunkOutput, file.buffer);
				} else {
					// Regular upload: write the file straight to the uploads folder
					const output = path.join(__dirname,
						'..', '..', '..', '..',
						process.env.UPLOAD_FOLDER,
						filename);
					await jetpack.writeAsync(output, file.buffer);
					uploadedFile = {
						name: filename,
						hash,
						size: file.buffer.length,
						url: filename
					};
				}
			}

			// Chunks get their thumbnails generated after the merge, not here
			if (!remappedKeys || !remappedKeys.uuid) Util.generateThumbnails(uploadedFile.name);

			return res.send(201, {
				message: 'Successfully uploaded the file.',
				...uploadedFile
			});
		});
	}

	_remapKeys(body) {
		const keys = Object.keys(body);
		if (!keys.length) return;
		for (const key of keys) {
			if (!/^dz/.test(key)) continue;
			body[key.replace(/^dz/, '')] = body[key];
			delete body[key];
		}
		return body;
	}
}

module.exports = uploadPOST;
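
Dropzone submits its chunk metadata as dz-prefixed form fields (dzuuid, dzchunkindex, dztotalchunkcount, …); _remapKeys strips that prefix so the route can read body.uuid and body.chunkindex. The same logic as a standalone, runnable sketch:

// Copy of the _remapKeys logic, runnable on its own.
function remapKeys(body) {
	for (const key of Object.keys(body)) {
		if (!/^dz/.test(key)) continue;
		body[key.replace(/^dz/, '')] = body[key];
		delete body[key];
	}
	return body;
}

console.log(remapKeys({ dzuuid: 'b9c1…', dzchunkindex: '2', foo: 'bar' }));
// -> { foo: 'bar', uuid: 'b9c1…', chunkindex: '2' }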

View File

@ -155,6 +155,11 @@ class Util {
		return hash.digest('hex');
	}

+	static generateFileHash(data) {
+		const hash = crypto.createHash('sha1').update(data).digest('hex');
+		return hash;
+	}
+
	static getFilenameFromPath(fullPath) {
		return fullPath.replace(/^.*[\\\/]/, ''); // eslint-disable-line no-useless-escape
	}
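
generateFileHash is a plain SHA-1 hex digest over the file buffer, so the value can be reproduced with Node's crypto module directly; a minimal sketch:

const crypto = require('crypto');

const digest = crypto.createHash('sha1')
	.update(Buffer.from('hello'))
	.digest('hex');
console.log(digest); // aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d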

View File

@ -108,14 +108,15 @@ export default {
	mounted() {
		this.dropzoneOptions = {
			url: `${this.config.baseURL}/upload`,
-			timeout: 300000, // 5 minutes
+			timeout: 600000, // 10 minutes
			autoProcessQueue: true,
			addRemoveLinks: false,
			parallelUploads: 5,
			uploadMultiple: false,
			maxFiles: 1000,
			createImageThumbnails: false,
-			paramName: 'file',
+			paramName: 'files[]',
+			forceChunking: false,
			chunking: true,
			retryChunks: true,
			retryChunksLimit: 3,
@ -169,14 +170,18 @@ export default {
			});
			console.error(file, message, xhr);
		},
-		dropzoneChunksUploaded(file, done) {
-			const response = JSON.parse(file.xhr.response);
-			if (!response.url) {
-				console.error('There was a problem uploading the file?');
-				return done();
-			}
+		async dropzoneChunksUploaded(file, done) {
+			const { data } = await this.$axios.post(`${this.config.baseURL}/upload/chunks`, {
+				files: [{
+					uuid: file.upload.uuid,
+					original: file.name,
+					size: file.size,
+					type: file.type,
+					count: file.upload.totalChunkCount
+				}]
+			});
-			this.processResult(file, response);
+			this.processResult(file, data);
			this.$forceUpdate();
			return done();
		},
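
Putting both halves together, the chunked flow is: Dropzone posts each chunk to /upload as multipart form data, and once every chunk is in, dropzoneChunksUploaded asks /upload/chunks to merge them. A minimal standalone sketch of that finalize call (axios and baseURL here stand in for whatever HTTP client and base URL the frontend already uses):

// 1. Each chunk goes to POST /upload as multipart form data with
//    dzuuid/dzchunkindex fields (Dropzone adds these automatically).
// 2. When all chunks are in, the client finalizes the upload:
async function finalizeUpload(axios, baseURL, file) {
	const { data } = await axios.post(`${baseURL}/upload/chunks`, {
		files: [{
			uuid: file.upload.uuid,
			original: file.name,
			size: file.size,
			type: file.type,
			count: file.upload.totalChunkCount
		}]
	});
	return data; // { message, name, size, url }
}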