diff --git a/.gitignore b/.gitignore
index e69de29..4243830 100644
--- a/.gitignore
+++ b/.gitignore
@@ -0,0 +1 @@
+/compression-1.0.1.tgz
diff --git a/nodejs-compression.spec b/nodejs-compression.spec
new file mode 100644
index 0000000..87bca32
--- /dev/null
+++ b/nodejs-compression.spec
@@ -0,0 +1,75 @@
+%{?nodejs_find_provides_and_requires}
+
+%global enable_tests 0
+
+Name:           nodejs-compression
+Version:        1.0.1
+Release:        1%{?dist}
+Summary:        Compression middleware for Node.js and Connect
+License:        MIT
+Group:          System Environment/Libraries
+URL:            https://github.com/expressjs/compression
+Source0:        http://registry.npmjs.org/compression/-/compression-%{version}.tgz
+# The test file is missing from the NPM tarball.
+Source1:        https://raw.github.com/expressjs/compression/ff7df439670a89129ce4d738ca5d0c72ce663b2a/test.js
+
+BuildArch:      noarch
+%if 0%{?fedora} >= 19
+ExclusiveArch:  %{nodejs_arches} noarch
+%else
+ExclusiveArch:  %{ix86} x86_64 %{arm} noarch
+%endif
+
+BuildRequires:  nodejs-packaging
+
+%if 0%{?enable_tests}
+BuildRequires:  mocha
+BuildRequires:  npm(bytes)
+BuildRequires:  npm(compressible)
+BuildRequires:  npm(connect)
+BuildRequires:  npm(negotiator)
+BuildRequires:  npm(should)
+BuildRequires:  npm(supertest)
+%endif
+
+%description
+%{summary}.
+
+
+%prep
+%setup -q -n package
+cp -p %{SOURCE1} .
+
+%nodejs_fixdep negotiator '~0.4.2'
+
+
+%build
+#nothing to do
+
+
+%install
+mkdir -p %{buildroot}%{nodejs_sitelib}/compression
+cp -pr package.json index.js \
+    %{buildroot}%{nodejs_sitelib}/compression
+
+%nodejs_symlink_deps
+
+
+%if 0%{?enable_tests}
+%check
+%nodejs_symlink_deps --check
+/usr/bin/mocha --require should --reporter spec
+%endif
+
+
+%files
+%doc HISTORY.md README.md
+%{nodejs_sitelib}/compression
+
+
+%changelog
+* Wed Mar 12 2014 Jamie Nguyen - 1.0.1-1
+- update to upstream release 1.0.1
+
+* Sat Mar 08 2014 Jamie Nguyen - 1.0.0-1
+- initial package
diff --git a/sources b/sources
index e69de29..19405b7 100644
--- a/sources
+++ b/sources
@@ -0,0 +1 @@
+c5a05a4de0a6931cf88e12f9ac69261d  compression-1.0.1.tgz
diff --git a/test.js b/test.js
new file mode 100644
index 0000000..b8d1831
--- /dev/null
+++ b/test.js
@@ -0,0 +1,202 @@
+var assert = require('assert');
+var connect = require('connect');
+var request = require('supertest');
+
+var compress = require('./');
+
+var app = connect();
+app.use(compress({
+  threshold: 0
+}));
+
+app.use(connect.static(__dirname));
+
+var app2 = connect();
+app2.use(compress({
+  threshold: '1kb'
+}));
+
+app2.use('/response/small', function(req, res){
+  res.setHeader('Content-Type', 'text/plain');
+  res.end('tiny');
+});
+
+app2.use('/response/large', function(req, res){
+  res.setHeader('Content-Type', 'text/plain');
+  res.end(new Buffer(2048));
+});
+
+app2.use('/stream/small/length', function(req, res){
+  res.setHeader('Content-Type', 'text/plain');
+  res.setHeader('Content-Length', '1');
+  res.write('a');
+  res.end();
+});
+
+app2.use('/stream/large/length', function(req, res){
+  res.setHeader('Content-Type', 'text/plain');
+  res.setHeader('Content-Length', '2048');
+  res.write(new Buffer(2048));
+  res.end();
+});
+
+app2.use('/stream/small', function(req, res, next){
+  res.setHeader('Content-Type', 'text/plain');
+  res.write('a');
+  res.end();
+});
+
+app2.use('/image', function(req, res){
+  res.setHeader('Content-Type', 'image/png');
+  res.write(new Buffer(2048));
+  res.end();
+});
+
+describe('compress()', function(){
+  it('should gzip files', function(done){
+    request(app)
+    .get('/package.json')
+    .set('Accept-Encoding', 'gzip')
+    .end(function(err, res){
+      res.body.should.not.equal('- groceries');
+      done();
+    });
+  })
+
+  it('should set Content-Encoding', function(done){
+    request(app)
+    .get('/package.json')
+    .set('Accept-Encoding', 'gzip')
+    .expect('Content-Encoding', 'gzip', done);
+  })
+
+  it('should support HEAD', function(done){
+    request(app)
+    .head('/package.json')
+    .set('Accept-Encoding', 'gzip')
+    .expect('', done);
+  })
+
+  it('should support conditional GETs', function(done){
+    request(app)
+    .get('/package.json')
+    .set('Accept-Encoding', 'gzip')
+    .end(function(err, res){
+      var date = res.headers['last-modified'];
+      request(app)
+      .get('/package.json')
+      .set('Accept-Encoding', 'gzip')
+      .set('If-Modified-Since', date)
+      .expect(304, done);
+    });
+  })
+
+  it('should set Vary', function(done){
+    request(app)
+    .get('/package.json')
+    .set('Accept-Encoding', 'gzip')
+    .expect('Vary', 'Accept-Encoding', done);
+  })
+
+  it('should set Vary even if Accept-Encoding is not set', function(done){
+    request(app)
+    .get('/package.json')
+    .expect('Vary', 'Accept-Encoding', done);
+  })
+
+  it('should not set Vary if Content-Type does not pass filter', function(done){
+    request(app2)
+    .get('/image')
+    .end(function(err, res){
+      res.headers.should.not.have.property('vary');
+      done();
+    })
+  })
+
+  it('should transfer chunked', function(done){
+    request(app)
+    .get('/package.json')
+    .set('Accept-Encoding', 'gzip')
+    .expect('Transfer-Encoding', 'chunked', done);
+  })
+
+  it('should remove Content-Length for chunked', function(done){
+    request(app)
+    .get('/package.json')
+    .set('Accept-Encoding', 'gzip')
+    .end(function(err, res){
+      res.headers.should.not.have.property('content-length');
+      done()
+    });
+  })
+
+  describe('threshold', function(){
+    it('should not compress responses below the threshold size', function(done){
+      request(app2)
+      .get('/response/small')
+      .set('Accept-Encoding', 'gzip')
+      .end(function(err, res){
+        // I don't know how to do this with supertest
+        // '' or 'identity' should be valid values as well,
+        // but they are not set by compress.
+        assert.equal(res.headers['content-encoding'], undefined);
+
+        done()
+      })
+    })
+
+    it('should compress responses above the threshold size', function(done){
+      request(app2)
+      .get('/response/large')
+      .set('Accept-Encoding', 'gzip')
+      .expect('Content-Encoding', 'gzip', done);
+    })
+
+    it('should compress when streaming without a content-length', function(done){
+      request(app2)
+      .get('/stream/small')
+      .set('Accept-Encoding', 'gzip')
+      .expect('Content-Encoding', 'gzip', done);
+    })
+
+    it('should not compress when streaming and content-length is lower than threshold', function(done){
+      request(app2)
+      .get('/stream/small/length')
+      .set('Accept-Encoding', 'gzip')
+      .end(function(err, res){
+        assert.equal(res.headers['content-encoding'], undefined);
+
+        done()
+      })
+    })
+
+    it('should compress when streaming and content-length is larger than threshold', function(done){
+      request(app2)
+      .get('/stream/large/length')
+      .set('Accept-Encoding', 'gzip')
+      .expect('Content-Encoding', 'gzip', done);
+    })
+  })
+
+  describe('res.flush()', function () {
+    it('should always be present', function (done) {
+      var app = connect();
+
+      app.use(compress());
+      app.use(function (req, res) {
+        res.flush.should.be.a.Function;
+        res.statusCode = 204;
+        res.end();
+      });
+
+      request(app)
+      .get('/')
+      .expect(204, done);
+    })
+
+    // If anyone knows how to test if the flush works...
+    // it('should flush the response', function (done) {
+
+    // })
+  })
+})