chore: Move statistics-related functions to its own file
fix: Extract database constructor to a separate file to ensure we only create one knex instance/db feat: Add cron for saving the stats to the database every hour feat: Get cached stats from database (if a stat is not found in the db, generate it)
This commit is contained in:
parent
9370c32182
commit
f151a8ac3a
|
@ -5560,6 +5560,14 @@
|
|||
"sha.js": "^2.4.8"
|
||||
}
|
||||
},
|
||||
"cron": {
|
||||
"version": "1.8.2",
|
||||
"resolved": "https://registry.npmjs.org/cron/-/cron-1.8.2.tgz",
|
||||
"integrity": "sha512-Gk2c4y6xKEO8FSAUTklqtfSr7oTq0CiPQeLBG5Fl0qoXpZyMcj1SG59YL+hqq04bu6/IuEA7lMkYDAplQNKkyg==",
|
||||
"requires": {
|
||||
"moment-timezone": "^0.5.x"
|
||||
}
|
||||
},
|
||||
"cross-env": {
|
||||
"version": "5.2.1",
|
||||
"resolved": "https://registry.npmjs.org/cross-env/-/cross-env-5.2.1.tgz",
|
||||
|
@ -11911,6 +11919,14 @@
|
|||
"resolved": "https://registry.npmjs.org/moment/-/moment-2.24.0.tgz",
|
||||
"integrity": "sha512-bV7f+6l2QigeBBZSM/6yTNq4P2fNpSWj/0e7jQcy87A8e7o2nAfP/34/2ky5Vw4B9S446EtIhodAzkFCcR4dQg=="
|
||||
},
|
||||
"moment-timezone": {
|
||||
"version": "0.5.32",
|
||||
"resolved": "https://registry.npmjs.org/moment-timezone/-/moment-timezone-0.5.32.tgz",
|
||||
"integrity": "sha512-Z8QNyuQHQAmWucp8Knmgei8YNo28aLjJq6Ma+jy1ZSpSk5nyfRT8xgUbSQvD2+2UajISfenndwvFuH3NGS+nvA==",
|
||||
"requires": {
|
||||
"moment": ">= 2.9.0"
|
||||
}
|
||||
},
|
||||
"morgan": {
|
||||
"version": "1.10.0",
|
||||
"resolved": "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz",
|
||||
|
|
|
@ -47,6 +47,7 @@
|
|||
"compression": "^1.7.2",
|
||||
"cookie-universal-nuxt": "^2.0.14",
|
||||
"cors": "^2.8.5",
|
||||
"cron": "^1.8.2",
|
||||
"dotenv": "^6.2.0",
|
||||
"dumper.js": "^1.3.1",
|
||||
"express": "^4.17.1",
|
||||
|
|
|
@ -80,6 +80,7 @@ exports.up = async knex => {
|
|||
table.timestamp('createdAt');
|
||||
});
|
||||
};
|
||||
|
||||
exports.down = async knex => {
|
||||
await knex.schema.dropTableIfExists('users');
|
||||
await knex.schema.dropTableIfExists('albums');
|
||||
|
|
|
@ -28,6 +28,7 @@ exports.up = async knex => {
|
|||
table.unique(['fileId', 'tagId']);
|
||||
});
|
||||
};
|
||||
|
||||
exports.down = async () => {
|
||||
// Nothing
|
||||
};
|
||||
|
|
|
@ -0,0 +1,16 @@
|
|||
|
||||
exports.up = async knex => {
|
||||
await knex.schema.createTable('statistics', table => {
|
||||
table.increments();
|
||||
table.integer('batchId');
|
||||
table.string('type');
|
||||
table.json('data');
|
||||
table.timestamp('createdAt');
|
||||
|
||||
table.unique(['batchId', 'type']);
|
||||
});
|
||||
};
|
||||
|
||||
exports.down = async knex => {
|
||||
await knex.schema.dropTableIfExists('statistics');
|
||||
};
|
|
@ -1,15 +1,7 @@
|
|||
const Route = require('../../structures/Route');
|
||||
const Util = require('../../utils/Util');
|
||||
const si = require('systeminformation');
|
||||
|
||||
// TODO: Implement a cache system that can be reset by other endpoints
|
||||
const statsCache = {
|
||||
system: null,
|
||||
fileSystems: null,
|
||||
uploads: null,
|
||||
users: null,
|
||||
albums: null
|
||||
};
|
||||
const StatsGenerator = require('../../utils/StatsGenerator');
|
||||
|
||||
// Thank you Bobby for the stats code https://github.com/BobbyWibowo/lolisafe/blob/safe.fiery.me/controllers/utilsController.js
|
||||
class filesGET extends Route {
|
||||
|
@ -17,191 +9,27 @@ class filesGET extends Route {
|
|||
super('/admin/stats', 'get', { adminOnly: true });
|
||||
}
|
||||
|
||||
async getSystemInfo() {
|
||||
const os = await si.osInfo();
|
||||
|
||||
const currentLoad = await si.currentLoad();
|
||||
const mem = await si.mem();
|
||||
const time = si.time();
|
||||
const nodeUptime = process.uptime();
|
||||
|
||||
return {
|
||||
'Platform': `${os.platform} ${os.arch}`,
|
||||
'Distro': `${os.distro} ${os.release}`,
|
||||
'Kernel': os.kernel,
|
||||
'CPU Load': `${currentLoad.currentload.toFixed(1)}%`,
|
||||
'CPUs Load': currentLoad.cpus.map(cpu => `${cpu.load.toFixed(1)}%`).join(', '),
|
||||
'System Memory': {
|
||||
value: {
|
||||
used: mem.active,
|
||||
total: mem.total
|
||||
},
|
||||
type: 'byteUsage'
|
||||
},
|
||||
'Memory Usage': {
|
||||
value: process.memoryUsage().rss,
|
||||
type: 'byte'
|
||||
},
|
||||
'System Uptime': {
|
||||
value: time.uptime,
|
||||
type: 'time'
|
||||
},
|
||||
'Node.js': `${process.versions.node}`,
|
||||
'Service Uptime': {
|
||||
value: Math.floor(nodeUptime),
|
||||
type: 'time'
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
async getFileSystemsInfo() {
|
||||
const stats = {};
|
||||
|
||||
const fsSize = await si.fsSize();
|
||||
for (const fs of fsSize) {
|
||||
stats[`${fs.fs} (${fs.type}) on ${fs.mount}`] = {
|
||||
value: {
|
||||
total: fs.size,
|
||||
used: fs.used
|
||||
},
|
||||
type: 'byteUsage'
|
||||
};
|
||||
}
|
||||
|
||||
return stats;
|
||||
}
|
||||
|
||||
async getUploadsInfo(db) {
|
||||
const stats = {
|
||||
'Total': 0,
|
||||
'Images': 0,
|
||||
'Videos': 0,
|
||||
'Others': {
|
||||
data: {},
|
||||
count: 0,
|
||||
type: 'detailed'
|
||||
},
|
||||
'Temporary': 0,
|
||||
'Size in DB': {
|
||||
value: 0,
|
||||
type: 'byte'
|
||||
}
|
||||
};
|
||||
|
||||
const getFilesCountAndSize = async () => {
|
||||
const uploads = await db.table('files').select('size');
|
||||
|
||||
return {
|
||||
'Total': uploads.length,
|
||||
'Size in DB': {
|
||||
value: uploads.reduce((acc, upload) => acc + parseInt(upload.size, 10), 0),
|
||||
type: 'byte'
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
const getImagesCount = async () => {
|
||||
const Images = await db.table('files')
|
||||
.where('type', 'like', `image/%`)
|
||||
.count('id as count')
|
||||
.then(rows => rows[0].count);
|
||||
|
||||
return { Images };
|
||||
};
|
||||
|
||||
const getVideosCount = async () => {
|
||||
const Videos = await db.table('files')
|
||||
.where('type', 'like', `video/%`)
|
||||
.count('id as count')
|
||||
.then(rows => rows[0].count);
|
||||
|
||||
return { Videos };
|
||||
};
|
||||
|
||||
const getOthersCount = async () => {
|
||||
// rename to key, value from type, count
|
||||
const data = await db.table('files')
|
||||
.select('type as key')
|
||||
.count('id as value')
|
||||
.whereNot('type', 'like', `image/%`)
|
||||
.whereNot('type', 'like', `video/%`)
|
||||
.groupBy('key')
|
||||
.orderBy('value', 'desc');
|
||||
|
||||
const count = data.reduce((acc, val) => acc + val.value, 0);
|
||||
|
||||
return {
|
||||
Others: {
|
||||
data,
|
||||
count,
|
||||
type: 'detailed'
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
const result = await Promise.all([getFilesCountAndSize(), getImagesCount(), getVideosCount(), getOthersCount()]);
|
||||
|
||||
return { ...stats, ...Object.assign({}, ...result) };
|
||||
}
|
||||
|
||||
async getUsersInfo(db) {
|
||||
const stats = {
|
||||
Total: 0,
|
||||
Admins: 0,
|
||||
Disabled: 0
|
||||
};
|
||||
|
||||
const users = await db.table('users');
|
||||
stats.Total = users.length;
|
||||
|
||||
for (const user of users) {
|
||||
if (!user.enabled) {
|
||||
stats.Disabled++;
|
||||
}
|
||||
|
||||
if (user.isAdmin) {
|
||||
stats.Admins++;
|
||||
}
|
||||
}
|
||||
|
||||
return stats;
|
||||
}
|
||||
|
||||
async getAlbumStats(db) {
|
||||
const stats = {
|
||||
'Total': 0,
|
||||
'NSFW': 0,
|
||||
'Generated archives': 0,
|
||||
'Generated identifiers': 0,
|
||||
'Files in albums': 0
|
||||
};
|
||||
|
||||
const albums = await db.table('albums');
|
||||
stats.Total = albums.length;
|
||||
for (const album of albums) {
|
||||
if (album.nsfw) stats.NSFW++;
|
||||
if (album.zipGeneratedAt) stats['Generated archives']++; // XXX: Bobby checks each after if a zip really exists on the disk. Is it really needed?
|
||||
}
|
||||
|
||||
stats['Generated identifiers'] = await db.table('albumsLinks').count('id as count').then(rows => rows[0].count);
|
||||
stats['Files in albums'] = await db.table('albumsFiles')
|
||||
.whereNotNull('albumId')
|
||||
.count('id as count')
|
||||
.then(rows => rows[0].count);
|
||||
|
||||
return stats;
|
||||
}
|
||||
|
||||
async run(req, res, db) {
|
||||
const tmp = {
|
||||
system: await this.getSystemInfo(),
|
||||
fileSystems: await this.getFileSystemsInfo(),
|
||||
uploads: await this.getUploadsInfo(db),
|
||||
users: await this.getUsersInfo(db),
|
||||
albums: await this.getAlbumStats(db)
|
||||
};
|
||||
const cachedStats = await db('statistics')
|
||||
.select('type', 'data', 'batchId')
|
||||
.where('batchId', '=', db('statistics').max('batchId'));
|
||||
|
||||
return res.json(tmp);
|
||||
let stats = cachedStats.reduce((acc, { type, data }) => {
|
||||
try {
|
||||
acc[type] = JSON.parse(data);
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
}
|
||||
|
||||
return acc;
|
||||
}, {});
|
||||
|
||||
stats = { ...stats, ...(await StatsGenerator.getMissingStats(db, Object.keys(stats))) };
|
||||
|
||||
return res.json(StatsGenerator.keyOrder.reduce((acc, k) => {
|
||||
acc[k] = stats[k];
|
||||
return acc;
|
||||
}, {}));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,40 @@
|
|||
const nodePath = require('path');
|
||||
const db = require('knex')({
|
||||
client: process.env.DB_CLIENT,
|
||||
connection: {
|
||||
host: process.env.DB_HOST,
|
||||
user: process.env.DB_USER,
|
||||
password: process.env.DB_PASSWORD,
|
||||
database: process.env.DB_DATABASE,
|
||||
filename: nodePath.join(__dirname, '../../../database/database.sqlite')
|
||||
},
|
||||
postProcessResponse: result => {
|
||||
/*
|
||||
Fun fact: Depending on the database used by the user and given that I don't want
|
||||
to force a specific database for everyone because of the nature of this project,
|
||||
some things like different data types for booleans need to be considered like in
|
||||
the implementation below where sqlite returns 1 and 0 instead of true and false.
|
||||
*/
|
||||
const booleanFields = ['enabled', 'enableDownload', 'isAdmin', 'nsfw'];
|
||||
|
||||
const processResponse = row => {
|
||||
Object.keys(row).forEach(key => {
|
||||
if (booleanFields.includes(key)) {
|
||||
if (row[key] === 0) row[key] = false;
|
||||
else if (row[key] === 1) row[key] = true;
|
||||
}
|
||||
});
|
||||
return row;
|
||||
};
|
||||
|
||||
if (Array.isArray(result)) return result.map(row => processResponse(row));
|
||||
if (typeof result === 'object') return processResponse(result);
|
||||
return result;
|
||||
},
|
||||
useNullAsDefault: process.env.DB_CLIENT === 'sqlite3',
|
||||
userParams: {
|
||||
lastMutationTime: null
|
||||
}
|
||||
});
|
||||
|
||||
module.exports = db;
|
|
@ -1,39 +1,5 @@
|
|||
const nodePath = require('path');
|
||||
const JWT = require('jsonwebtoken');
|
||||
const db = require('knex')({
|
||||
client: process.env.DB_CLIENT,
|
||||
connection: {
|
||||
host: process.env.DB_HOST,
|
||||
user: process.env.DB_USER,
|
||||
password: process.env.DB_PASSWORD,
|
||||
database: process.env.DB_DATABASE,
|
||||
filename: nodePath.join(__dirname, '../../../database/database.sqlite')
|
||||
},
|
||||
postProcessResponse: result => {
|
||||
/*
|
||||
Fun fact: Depending on the database used by the user and given that I don't want
|
||||
to force a specific database for everyone because of the nature of this project,
|
||||
some things like different data types for booleans need to be considered like in
|
||||
the implementation below where sqlite returns 1 and 0 instead of true and false.
|
||||
*/
|
||||
const booleanFields = ['enabled', 'enableDownload', 'isAdmin', 'nsfw'];
|
||||
|
||||
const processResponse = row => {
|
||||
Object.keys(row).forEach(key => {
|
||||
if (booleanFields.includes(key)) {
|
||||
if (row[key] === 0) row[key] = false;
|
||||
else if (row[key] === 1) row[key] = true;
|
||||
}
|
||||
});
|
||||
return row;
|
||||
};
|
||||
|
||||
if (Array.isArray(result)) return result.map(row => processResponse(row));
|
||||
if (typeof result === 'object') return processResponse(result);
|
||||
return result;
|
||||
},
|
||||
useNullAsDefault: process.env.DB_CLIENT === 'sqlite3'
|
||||
});
|
||||
const db = require('./Database');
|
||||
const moment = require('moment');
|
||||
const log = require('../utils/Log');
|
||||
|
||||
|
|
|
@ -14,8 +14,11 @@ const jetpack = require('fs-jetpack');
|
|||
const path = require('path');
|
||||
const morgan = require('morgan');
|
||||
const rfs = require('rotating-file-stream');
|
||||
const CronJob = require('cron').CronJob;
|
||||
const log = require('../utils/Log');
|
||||
|
||||
const Util = require('../utils/Util');
|
||||
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const rateLimiter = new RateLimit({
|
||||
windowMs: parseInt(process.env.RATE_LIMIT_WINDOW, 10),
|
||||
|
@ -55,6 +58,9 @@ class Server {
|
|||
// Serve the uploads
|
||||
this.server.use(express.static(path.join(__dirname, '../../../uploads')));
|
||||
this.routesFolder = path.join(__dirname, '../routes');
|
||||
|
||||
// Save the cron job instances in case we want to stop them later
|
||||
this.jobs = {};
|
||||
}
|
||||
|
||||
registerAllTheRoutes() {
|
||||
|
@ -95,6 +101,11 @@ class Server {
|
|||
});
|
||||
}
|
||||
|
||||
createJobs() {
|
||||
// TODO: move into the database config. (we can just show the crontab line for start, later on we can add dropdowns and stuff)
|
||||
this.jobs.stats = new CronJob('* 0 * * * *', Util.saveStatsToDb, null, true);
|
||||
}
|
||||
|
||||
start() {
|
||||
jetpack.dir('uploads/chunks');
|
||||
jetpack.dir('uploads/thumbs/square');
|
||||
|
@ -105,6 +116,8 @@ class Server {
|
|||
log.success(`Backend ready and listening on port ${this.port}`);
|
||||
});
|
||||
server.setTimeout(600000);
|
||||
|
||||
this.createJobs();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,209 @@
|
|||
const si = require('systeminformation');
|
||||
|
||||
class StatsGenerator {
|
||||
static statGenerators = {
|
||||
system: StatsGenerator.getSystemInfo,
|
||||
fileSystems: StatsGenerator.getFileSystemsInfo,
|
||||
uploads: StatsGenerator.getUploadsInfo,
|
||||
users: StatsGenerator.getUsersInfo,
|
||||
albums: StatsGenerator.getAlbumStats
|
||||
};
|
||||
|
||||
static keyOrder = Object.keys(StatsGenerator.statGenerators);
|
||||
|
||||
static async getSystemInfo() {
|
||||
const os = await si.osInfo();
|
||||
|
||||
const currentLoad = await si.currentLoad();
|
||||
const mem = await si.mem();
|
||||
const time = si.time();
|
||||
const nodeUptime = process.uptime();
|
||||
|
||||
return {
|
||||
'Platform': `${os.platform} ${os.arch}`,
|
||||
'Distro': `${os.distro} ${os.release}`,
|
||||
'Kernel': os.kernel,
|
||||
'CPU Load': `${currentLoad.currentload.toFixed(1)}%`,
|
||||
'CPUs Load': currentLoad.cpus.map(cpu => `${cpu.load.toFixed(1)}%`).join(', '),
|
||||
'System Memory': {
|
||||
value: {
|
||||
used: mem.active,
|
||||
total: mem.total
|
||||
},
|
||||
type: 'byteUsage'
|
||||
},
|
||||
'Memory Usage': {
|
||||
value: process.memoryUsage().rss,
|
||||
type: 'byte'
|
||||
},
|
||||
'System Uptime': {
|
||||
value: time.uptime,
|
||||
type: 'time'
|
||||
},
|
||||
'Node.js': `${process.versions.node}`,
|
||||
'Service Uptime': {
|
||||
value: Math.floor(nodeUptime),
|
||||
type: 'time'
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
static async getFileSystemsInfo() {
|
||||
const stats = {};
|
||||
|
||||
const fsSize = await si.fsSize();
|
||||
for (const fs of fsSize) {
|
||||
stats[`${fs.fs} (${fs.type}) on ${fs.mount}`] = {
|
||||
value: {
|
||||
total: fs.size,
|
||||
used: fs.used
|
||||
},
|
||||
type: 'byteUsage'
|
||||
};
|
||||
}
|
||||
|
||||
return stats;
|
||||
}
|
||||
|
||||
static async getUploadsInfo(db) {
|
||||
const stats = {
|
||||
'Total': 0,
|
||||
'Images': 0,
|
||||
'Videos': 0,
|
||||
'Others': {
|
||||
data: {},
|
||||
count: 0,
|
||||
type: 'detailed'
|
||||
},
|
||||
'Size in DB': {
|
||||
value: 0,
|
||||
type: 'byte'
|
||||
}
|
||||
};
|
||||
|
||||
const getFilesCountAndSize = async () => {
|
||||
const uploads = await db.table('files').select('size');
|
||||
|
||||
return {
|
||||
'Total': uploads.length,
|
||||
'Size in DB': {
|
||||
value: uploads.reduce((acc, upload) => acc + parseInt(upload.size, 10), 0),
|
||||
type: 'byte'
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
const getImagesCount = async () => {
|
||||
const Images = await db.table('files')
|
||||
.where('type', 'like', `image/%`)
|
||||
.count('id as count')
|
||||
.then(rows => rows[0].count);
|
||||
|
||||
return { Images };
|
||||
};
|
||||
|
||||
const getVideosCount = async () => {
|
||||
const Videos = await db.table('files')
|
||||
.where('type', 'like', `video/%`)
|
||||
.count('id as count')
|
||||
.then(rows => rows[0].count);
|
||||
|
||||
return { Videos };
|
||||
};
|
||||
|
||||
const getOthersCount = async () => {
|
||||
// rename to key, value from type, count
|
||||
const data = await db.table('files')
|
||||
.select('type as key')
|
||||
.count('id as value')
|
||||
.whereNot('type', 'like', `image/%`)
|
||||
.whereNot('type', 'like', `video/%`)
|
||||
.groupBy('key')
|
||||
.orderBy('value', 'desc');
|
||||
|
||||
const count = data.reduce((acc, val) => acc + val.value, 0);
|
||||
|
||||
return {
|
||||
Others: {
|
||||
data,
|
||||
count,
|
||||
type: 'detailed'
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
const result = await Promise.all([getFilesCountAndSize(), getImagesCount(), getVideosCount(), getOthersCount()]);
|
||||
|
||||
return { ...stats, ...Object.assign({}, ...result) };
|
||||
}
|
||||
|
||||
static async getUsersInfo(db) {
|
||||
const stats = {
|
||||
Total: 0,
|
||||
Admins: 0,
|
||||
Disabled: 0
|
||||
};
|
||||
|
||||
const users = await db.table('users');
|
||||
stats.Total = users.length;
|
||||
|
||||
for (const user of users) {
|
||||
if (!user.enabled) {
|
||||
stats.Disabled++;
|
||||
}
|
||||
|
||||
if (user.isAdmin) {
|
||||
stats.Admins++;
|
||||
}
|
||||
}
|
||||
|
||||
return stats;
|
||||
}
|
||||
|
||||
static async getAlbumStats(db) {
|
||||
const stats = {
|
||||
'Total': 0,
|
||||
'NSFW': 0,
|
||||
'Generated archives': 0,
|
||||
'Generated identifiers': 0,
|
||||
'Files in albums': 0
|
||||
};
|
||||
|
||||
const albums = await db.table('albums');
|
||||
stats.Total = albums.length;
|
||||
for (const album of albums) {
|
||||
if (album.nsfw) stats.NSFW++;
|
||||
if (album.zipGeneratedAt) stats['Generated archives']++; // XXX: Bobby checks each after if a zip really exists on the disk. Is it really needed?
|
||||
}
|
||||
|
||||
stats['Generated identifiers'] = await db.table('albumsLinks').count('id as count').then(rows => rows[0].count);
|
||||
stats['Files in albums'] = await db.table('albumsFiles')
|
||||
.whereNotNull('albumId')
|
||||
.count('id as count')
|
||||
.then(rows => rows[0].count);
|
||||
|
||||
return stats;
|
||||
}
|
||||
|
||||
static async getStats(db) {
|
||||
const res = {};
|
||||
|
||||
for (const [name, funct] of Object.entries(StatsGenerator.statGenerators)) {
|
||||
res[name] = await funct(db);
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
static async getMissingStats(db, existingStats) {
|
||||
const res = {};
|
||||
|
||||
for (const [name, funct] of Object.entries(StatsGenerator.statGenerators)) {
|
||||
if (existingStats.indexOf(name) === -1) res[name] = await funct(db);
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = StatsGenerator;
|
|
@ -3,27 +3,20 @@ const jetpack = require('fs-jetpack');
|
|||
const randomstring = require('randomstring');
|
||||
const path = require('path');
|
||||
const JWT = require('jsonwebtoken');
|
||||
const db = require('knex')({
|
||||
client: process.env.DB_CLIENT,
|
||||
connection: {
|
||||
host: process.env.DB_HOST,
|
||||
user: process.env.DB_USER,
|
||||
password: process.env.DB_PASSWORD,
|
||||
database: process.env.DB_DATABASE,
|
||||
filename: path.join(__dirname, '../../../database/database.sqlite')
|
||||
},
|
||||
useNullAsDefault: process.env.DB_CLIENT === 'sqlite'
|
||||
});
|
||||
const db = require('../structures/Database');
|
||||
const moment = require('moment');
|
||||
const Zip = require('adm-zip');
|
||||
const uuidv4 = require('uuid/v4');
|
||||
|
||||
const log = require('./Log');
|
||||
const ThumbUtil = require('./ThumbUtil');
|
||||
const StatsGenerator = require('./StatsGenerator');
|
||||
|
||||
const blockedExtensions = process.env.BLOCKED_EXTENSIONS.split(',');
|
||||
const preserveExtensions = ['.tar.gz', '.tar.z', '.tar.bz2', '.tar.lzma', '.tar.lzo', '.tar.xz'];
|
||||
|
||||
let statsLastSavedTime = null;
|
||||
|
||||
class Util {
|
||||
static uploadPath = path.join(__dirname, '../../../', process.env.UPLOAD_FOLDER);
|
||||
|
||||
|
@ -316,6 +309,35 @@ class Util {
|
|||
|
||||
return extname + multi;
|
||||
}
|
||||
|
||||
static async saveStatsToDb() {
|
||||
// if we alredy saved a stats to the db, and there were no new changes to the db since then
|
||||
// skip generating and saving new stats.
|
||||
// XXX: Should we save non-db related statistics to the database anyway? (like performance, disk usage)
|
||||
if (statsLastSavedTime && statsLastSavedTime > db.userParams.lastMutationTime) {
|
||||
return;
|
||||
}
|
||||
|
||||
const now = moment.utc().toDate();
|
||||
const stats = await StatsGenerator.getStats(db);
|
||||
|
||||
let batchId = 1;
|
||||
|
||||
const res = (await db('statistics').max({ lastBatch: 'batchId' }))[0];
|
||||
if (res && res.lastBatch) {
|
||||
batchId = res.lastBatch + 1;
|
||||
}
|
||||
|
||||
try {
|
||||
for (const [type, data] of Object.entries(stats)) {
|
||||
await db.table('statistics').insert({ type, data: JSON.stringify(data), createdAt: now, batchId });
|
||||
}
|
||||
|
||||
statsLastSavedTime = now.getTime();
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Util;
|
||||
|
|
Loading…
Reference in New Issue