2022-06-23 13:58:36 -07:00
|
|
|
"use strict";
|
|
|
|
|
var DataReader = require("./DataReader");
|
|
|
|
|
var utils = require("../utils");
|
2013-10-13 12:56:07 -04:00
|
|
|
|
2015-04-09 21:14:23 +02:00
|
|
|
/**
 * A DataReader that reads its bytes out of a plain binary string
 * (one character per byte).
 * @constructor
 * @param {String} str the binary string to read from.
 */
function StringReader(str) {
    DataReader.call(this, str);
}
utils.inherits(StringReader, DataReader);
|
2013-10-13 12:56:07 -04:00
|
|
|
/**
 * Get the byte (char code) at the given index, relative to `this.zero`.
 * @see DataReader.byteAt
 */
StringReader.prototype.byteAt = function(i) {
    var absoluteIndex = this.zero + i;
    return this.data.charCodeAt(absoluteIndex);
};
|
|
|
|
|
/**
 * Find the last occurrence of a signature string, as an index relative
 * to `this.zero` (negative if not found or before `this.zero`).
 * @see DataReader.lastIndexOfSignature
 */
StringReader.prototype.lastIndexOfSignature = function(sig) {
    var absolutePosition = this.data.lastIndexOf(sig);
    return absolutePosition - this.zero;
};
|
2014-02-23 22:02:46 +01:00
|
|
|
/**
 * Consume the next 4 bytes and compare them to the expected signature.
 * Advances the read index by 4 in every case (match or not).
 * @see DataReader.readAndCheckSignature
 */
StringReader.prototype.readAndCheckSignature = function (sig) {
    // readData(4) returns a 4-char string here, so strict equality works.
    return sig === this.readData(4);
};
|
2013-10-13 12:56:07 -04:00
|
|
|
/**
 * Read `size` bytes and advance the read index.
 * Returns the raw substring (one character per byte).
 * @see DataReader.readData
 */
StringReader.prototype.readData = function(size) {
    this.checkOffset(size);
    // NOTE(review): assumes each char code fits in a byte (binary string),
    // prepared upstream before this reader is constructed — confirm at call sites.
    var start = this.zero + this.index;
    var result = this.data.slice(start, start + size);
    this.index += size;
    return result;
};
|
|
|
|
|
// Public export: the string-backed DataReader implementation.
module.exports = StringReader;
|