Create CachingMiddleware for zipping and caching.

Chad Weider 2012-01-29 00:42:54 -08:00
parent 43f4184e8d
commit bb10f026ca
3 changed files with 190 additions and 89 deletions
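In outline: the gzip and on-disk caching logic that previously lived inside respondMinified() in Minify.js moves into a new, reusable CachingMiddleware (utils/caching_middleware, per the new require), which the server module now mounts ahead of the minifier on the /minified/:filename route. Minify.js is left to concatenate and stream the requested files; the middleware captures the response, writes plain and gzipped copies under CACHE_DIR, and answers later requests, including conditional GETs, from that cache.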

View File

@@ -31,6 +31,7 @@ var async = require('async');
 var express = require('express');
 var path = require('path');
 var minify = require('./utils/Minify');
+var CachingMiddleware = require('./utils/caching_middleware');
 var formidable = require('formidable');
 var apiHandler;
 var exportHandler;
@@ -154,7 +155,8 @@ async.waterfall([
   });
   //serve minified files
-  app.all('/minified/:filename', minify.minifyJS);
+  var assetCache = new CachingMiddleware;
+  app.all('/minified/:filename', assetCache.handle, minify.minifyJS);
   //checks for padAccess
   function hasPadAccess(req, res, callback)

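The route change above relies on standard Express handler chaining: the first handler either responds on its own or calls next() to fall through to the second. A minimal, self-contained sketch of that pattern (Express 2.x style, matching the era of this code; the handler names are made up for illustration):

var express = require('express');
var app = express.createServer();  // Express 2.x constructor; newer Express uses express()

// First handler: answer from a cache when possible, otherwise defer.
function checkCache(req, res, next) {
  var hit = false;  // placeholder for a real cache lookup
  if (hit) {
    res.send('cached body');
  } else {
    next();  // fall through to the next handler registered on this route
  }
}

// Second handler: generate the response from scratch.
function generate(req, res) {
  res.send('freshly generated body');
}

// Same shape as the diff above: cache check first, minifier second.
app.all('/minified/:filename', checkCache, generate);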
View File

@@ -27,16 +27,12 @@ var cleanCSS = require('clean-css');
 var jsp = require("uglify-js").parser;
 var pro = require("uglify-js").uglify;
 var path = require('path');
-var Buffer = require('buffer').Buffer;
-var zlib = require('zlib');
 var RequireKernel = require('require-kernel');
 var server = require('../server');
-var os = require('os');
 var ROOT_DIR = path.normalize(__dirname + "/../" );
 var JS_DIR = ROOT_DIR + '../static/js/';
 var CSS_DIR = ROOT_DIR + '../static/css/';
-var CACHE_DIR = ROOT_DIR + '../var/';
 var TAR_PATH = path.join(__dirname, 'tar.json');
 var tar = JSON.parse(fs.readFileSync(TAR_PATH, 'utf8'));
@@ -110,36 +106,8 @@ function _handle(req, res, jsFilename, jsFiles) {
   function respondMinified()
   {
     var result = undefined;
-    var latestModification = new Date(res.getHeader('last-modified'));
-    async.series([
-      //check the modification time of the minified js
-      function(callback)
-      {
-        fs.stat(CACHE_DIR + "/minified_" + jsFilename, function(err, stats)
-        {
-          if(err && err.code != "ENOENT")
-          {
-            ERR(err, callback);
-            return;
-          }
-          //there is no minfied file or there new changes since this file was generated, so continue generating this file
-          if((err && err.code == "ENOENT") || stats.mtime.getTime() < latestModification)
-          {
-            callback();
-          }
-          //the minified file is still up to date, stop minifying
-          else
-          {
-            callback("stop");
-          }
-        });
-      },
-      //load all js files
-      function (callback)
-      {
     var values = [];
+    res.writeHead(200, {});
     tarCode(
       jsFiles
     , function (content) {values.push(content)}
@@ -147,59 +115,15 @@
         if(ERR(err)) return;
         result = values.join('');
-        callback();
-      });
-    },
-    //put all together and write it into a file
-    function(callback)
-    {
-      async.parallel([
-        //write the results plain in a file
-        function(callback)
-        {
-          fs.writeFile(CACHE_DIR + "minified_" + jsFilename, result, "utf8", callback);
-        },
-        //write the results compressed in a file
-        function(callback)
-        {
-          zlib.gzip(result, function(err, compressedResult){
-            //weird gzip bug that returns 0 instead of null if everything is ok
-            err = err === 0 ? null : err;
-            if(ERR(err, callback)) return;
-            fs.writeFile(CACHE_DIR + "minified_" + jsFilename + ".gz", compressedResult, callback);
-          });
-        }
-      ],callback);
-    }
-  ], function(err)
-  {
-    if(err && err != "stop")
-    {
-      if(ERR(err)) return;
-    }
-    //check if gzip is supported by this browser
-    var gzipSupport = req.header('Accept-Encoding', '').indexOf('gzip') != -1;
-    var pathStr;
-    if(gzipSupport && os.type().indexOf("Windows") == -1)
-    {
-      pathStr = path.normalize(CACHE_DIR + "minified_" + jsFilename + ".gz");
-      res.header('Content-Encoding', 'gzip');
-    }
-    else
-    {
-      pathStr = path.normalize(CACHE_DIR + "minified_" + jsFilename );
-    }
-    res.sendfile(pathStr, { maxAge: server.maxAge });
-  })
+        res.write(result);
+        res.end();
+      }
+    );
   }
   //minifying is disabled, so put the files together in one file
   function respondRaw()
   {
+    res.writeHead(200, {});
     tarCode(
       jsFiles
     , function (content) {res.write(content)}

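The removed block above was the only consumer of zlib, os and CACHE_DIR in Minify.js, which is why those requires go away as well. For reference, the plain-plus-gzipped double write it performed (and which the new middleware now performs on Minify's behalf) boils down to this sketch; writeCachedPair and the paths are illustrative, not code from the repository:

var fs = require('fs');
var zlib = require('zlib');

// Store a generated body twice: once as-is and once gzip-compressed,
// so a later request can be served whichever encoding it accepts.
function writeCachedPair(basePath, body, callback) {
  fs.writeFile(basePath, body, 'utf8', function (err) {
    if (err) return callback(err);
    zlib.gzip(body, function (err, compressed) {
      if (err) return callback(err);
      fs.writeFile(basePath + '.gz', compressed, callback);
    });
  });
}

// writeCachedPair('/tmp/minified_pad.js', minifiedSource, done);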
View File

@@ -0,0 +1,175 @@
/*
 * 2011 Peter 'Pita' Martischka (Primary Technology Ltd)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS-IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

var async = require('async');
var Buffer = require('buffer').Buffer;
var fs = require('fs');
var path = require('path');
var server = require('../server');
var zlib = require('zlib');
var util = require('util');

var ROOT_DIR = path.normalize(__dirname + "/../");
var CACHE_DIR = ROOT_DIR + '../var/';

var responseCache = {};

/*
  This caches and compresses 200 and 404 responses to GET and HEAD requests.
  TODO: Caching and compressing are solved problems, a middleware configuration
  should replace this.
*/
function CachingMiddleware() {
}
CachingMiddleware.prototype = new function () {
  function handle(req, res, next) {
    if (!(req.method == "GET" || req.method == "HEAD")) {
      return next(undefined, req, res);
    }

    var old_req = {};
    var old_res = {};

    var supportsGzip =
        req.header('Accept-Encoding', '').indexOf('gzip') != -1;

    var path = require('url').parse(req.url).path;
    var cacheKey = (new Buffer(path)).toString('base64').replace(/[\/\+=]/g, '');

    fs.stat(CACHE_DIR + 'minified_' + cacheKey, function (error, stats) {
      var modifiedSince = (req.headers['if-modified-since']
          && new Date(req.headers['if-modified-since']));
      var lastModifiedCache = stats && stats.mtime;
      if (lastModifiedCache) {
        req.headers['if-modified-since'] = lastModifiedCache.toUTCString();
      } else {
        delete req.headers['if-modified-since'];
      }

      // Always issue get to downstream.
      old_req.method = req.method;
      req.method = 'GET';

      var expirationDate = new Date(((responseCache[cacheKey] || {}).headers || {})['expires']);
      if (expirationDate > new Date()) {
        // Our cached version is still valid.
        return respond();
      }

      var _headers = {};
      old_res.setHeader = res.setHeader;
      res.setHeader = function (key, value) {
        _headers[key.toLowerCase()] = value;
        old_res.setHeader.call(res, key, value);
      };

      old_res.writeHead = res.writeHead;
      res.writeHead = function (status, headers) {
        var lastModified = (res.getHeader('last-modified')
            && new Date(res.getHeader('last-modified')));

        res.writeHead = old_res.writeHead;
        if (status == 200 || status == 404) {
          // Update cache
          var buffer = '';

          Object.keys(headers).forEach(function (key) {
            res.setHeader(key, headers[key]);
          });
          headers = _headers;

          responseCache[cacheKey] = {statusCode: status, headers: headers};

          old_res.write = res.write;
          old_res.end = res.end;
          res.write = function(data, encoding) {
            buffer += data.toString(encoding);
          };
          res.end = function(data, encoding) {
            async.parallel([
              function (callback) {
                var path = CACHE_DIR + 'minified_' + cacheKey;
                fs.writeFile(path, buffer, function (error, stats) {
                  callback();
                });
              }
            , function (callback) {
                var path = CACHE_DIR + 'minified_' + cacheKey + '.gz';
                zlib.gzip(buffer, function(error, content) {
                  if (error) {
                    callback();
                  } else {
                    fs.writeFile(path, content, function (error, stats) {
                      callback();
                    });
                  }
                });
              }
            ], respond);
          };
        } else if (status == 304) {
          // Nothing new changed from the cached version.
          old_res.write = res.write;
          old_res.end = res.end;
          res.write = function(data, encoding) {};
          res.end = function(data, encoding) { respond() };
        } else {
          res.writeHead(status, headers);
        }
      };

      next(undefined, req, res);

      // This handles read/write synchronization as well as its predecessor,
      // which is to say, not at all.
      // TODO: Implement locking on write or ditch caching of gzip and use
      // existing middlewares.
      function respond() {
        req.method = old_req.method || req.method;
        res.write = old_res.write || res.write;
        res.end = old_res.end || res.end;

        var headers = responseCache[cacheKey].headers;
        var statusCode = responseCache[cacheKey].statusCode;

        var pathStr = CACHE_DIR + 'minified_' + cacheKey;
        if (supportsGzip) {
          pathStr = pathStr + '.gz';
          headers['content-encoding'] = 'gzip';
        }

        var lastModified = (headers['last-modified']
            && new Date(headers['last-modified']));

        if (statusCode == 200 && lastModified <= modifiedSince) {
          res.writeHead(304, headers);
          res.end();
        } else if (req.method == 'GET') {
          var readStream = fs.createReadStream(pathStr);
          res.writeHead(statusCode, headers);
          util.pump(readStream, res);
        } else {
          res.writeHead(200, headers);
          res.end();
        }
      }
    });
  }

  this.handle = handle;
}();

module.exports = CachingMiddleware;
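A note on the cache file names used throughout caching_middleware.js: the key is simply the request path, base64-encoded and stripped of the characters that are awkward in file names ('/', '+', '='), then prefixed with 'minified_' under CACHE_DIR. An isolated sketch of that derivation (the helper name is made up; new Buffer(...) matches the 2012-era API above, Buffer.from(...) is the modern equivalent):

var url = require('url');

// Derive the on-disk cache key the same way the middleware's handle() does.
function cacheKeyFor(requestUrl) {
  var p = url.parse(requestUrl).path;
  return (new Buffer(p)).toString('base64').replace(/[\/\+=]/g, '');
}

// cacheKeyFor('/minified/pad.js') yields a filesystem-safe token; the cached
// copies are then written to CACHE_DIR + 'minified_' + key, plus a '.gz' sibling.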