bill 1 month ago
parent
commit
9be2d80b4e
100 changed files with 20576 additions and 0 deletions
  1. BIN  .DS_Store
  2. +9 -0  libs/jszip-utils/CHANGES.md
  3. +123 -0  libs/jszip-utils/Gruntfile.js
  4. +651 -0  libs/jszip-utils/LICENSE.markdown
  5. +30 -0  libs/jszip-utils/README.markdown
  6. +25 -0  libs/jszip-utils/_config.yml
  7. +26 -0  libs/jszip-utils/bower.json
  8. +18 -0  libs/jszip-utils/component.json
  9. +56 -0  libs/jszip-utils/dist/jszip-utils-ie.js
  10. +1 -0  libs/jszip-utils/dist/jszip-utils-ie.min.js
  11. +160 -0  libs/jszip-utils/dist/jszip-utils.js
  12. +1 -0  libs/jszip-utils/dist/jszip-utils.min.js
  13. +86 -0  libs/jszip-utils/documentation/_layouts/default.html
  14. +6 -0  libs/jszip-utils/documentation/api.md
  15. +85 -0  libs/jszip-utils/documentation/api/getbinarycontent.md
  16. +7 -0  libs/jszip-utils/documentation/css/main.css
  17. +64 -0  libs/jszip-utils/documentation/css/pygments.css
  18. +25 -0  libs/jszip-utils/index.md
  19. +145 -0  libs/jszip-utils/lib/index.js
  20. +43 -0  libs/jszip-utils/lib/index_IE.js
  21. +9 -0  libs/jszip-utils/lib/license_header.js
  22. +42 -0  libs/jszip-utils/package.json
  23. +55 -0  libs/jszip-utils/test/index.html
  24. +1 -0  libs/jszip-utils/test/ref/amount.txt
  25. BIN  libs/jszip-utils/test/ref/smile.gif
  26. +226 -0  libs/jszip-utils/test/test.js
  27. +204 -0  libs/jszip/CHANGES.md
  28. +651 -0  libs/jszip/LICENSE.markdown
  29. +33 -0  libs/jszip/README.markdown
  30. +37 -0  libs/jszip/deps.js
  31. +11577 -0  libs/jszip/dist/jszip.js
  32. +13 -0  libs/jszip/dist/jszip.min.js
  33. +601 -0  libs/jszip/graph.svg
  34. +330 -0  libs/jszip/index.d.ts
  35. +106 -0  libs/jszip/lib/base64.js
  36. +74 -0  libs/jszip/lib/compressedObject.js
  37. +14 -0  libs/jszip/lib/compressions.js
  38. +77 -0  libs/jszip/lib/crc32.js
  39. +11 -0  libs/jszip/lib/defaults.js
  40. +18 -0  libs/jszip/lib/external.js
  41. +85 -0  libs/jszip/lib/flate.js
  42. +539 -0  libs/jszip/lib/generate/ZipFileWorker.js
  43. +57 -0  libs/jszip/lib/generate/index.js
  44. +55 -0  libs/jszip/lib/index.js
  45. +11 -0  libs/jszip/lib/license_header.js
  46. +88 -0  libs/jszip/lib/load.js
  47. +74 -0  libs/jszip/lib/nodejs/NodejsStreamInputAdapter.js
  48. +42 -0  libs/jszip/lib/nodejs/NodejsStreamOutputAdapter.js
  49. +57 -0  libs/jszip/lib/nodejsUtils.js
  50. +384 -0  libs/jszip/lib/object.js
  51. +10 -0  libs/jszip/lib/readable-stream-browser.js
  52. +57 -0  libs/jszip/lib/reader/ArrayReader.js
  53. +116 -0  libs/jszip/lib/reader/DataReader.js
  54. +19 -0  libs/jszip/lib/reader/NodeBufferReader.js
  55. +38 -0  libs/jszip/lib/reader/StringReader.js
  56. +22 -0  libs/jszip/lib/reader/Uint8ArrayReader.js
  57. +28 -0  libs/jszip/lib/reader/readerFor.js
  58. +7 -0  libs/jszip/lib/signature.js
  59. +26 -0  libs/jszip/lib/stream/ConvertWorker.js
  60. +24 -0  libs/jszip/lib/stream/Crc32Probe.js
  61. +29 -0  libs/jszip/lib/stream/DataLengthProbe.js
  62. +116 -0  libs/jszip/lib/stream/DataWorker.js
  63. +263 -0  libs/jszip/lib/stream/GenericWorker.js
  64. +214 -0  libs/jszip/lib/stream/StreamHelper.js
  65. +38 -0  libs/jszip/lib/support.js
  66. +275 -0  libs/jszip/lib/utf8.js
  67. +501 -0  libs/jszip/lib/utils.js
  68. +261 -0  libs/jszip/lib/zipEntries.js
  69. +293 -0  libs/jszip/lib/zipEntry.js
  70. +133 -0  libs/jszip/lib/zipObject.js
  71. +67 -0  libs/jszip/package.json
  72. +21 -0  libs/jszip/sponsors.md
  73. +101 -0  libs/jszip/tsconfig.json
  74. +247 -0  libs/jszip/vendor/FileSaver.js
  75. +13 -0  libs/npyjs/CHANGELOG.md
  76. +174 -0  libs/npyjs/LICENSE
  77. +99 -0  libs/npyjs/README.md
  78. +1 -0  libs/npyjs/docs/_config.yml
  79. BIN  libs/npyjs/docs/apl-logo.png
  80. +49 -0  libs/npyjs/docs/index.md
  81. +99 -0  libs/npyjs/index.d.ts
  82. +180 -0  libs/npyjs/index.js
  83. +23 -0  libs/npyjs/package.json
  84. BIN  libs/npyjs/test/data/10-float16.npy
  85. BIN  libs/npyjs/test/data/10-float32.npy
  86. BIN  libs/npyjs/test/data/10-float64.npy
  87. BIN  libs/npyjs/test/data/10-int16.npy
  88. BIN  libs/npyjs/test/data/10-int64.npy
  89. BIN  libs/npyjs/test/data/10-int8.npy
  90. BIN  libs/npyjs/test/data/100x100x100-float16.npy
  91. BIN  libs/npyjs/test/data/100x100x100-float32.npy
  92. BIN  libs/npyjs/test/data/100x100x100-float64.npy
  93. BIN  libs/npyjs/test/data/100x100x100-int16.npy
  94. BIN  libs/npyjs/test/data/100x100x100-int64.npy
  95. BIN  libs/npyjs/test/data/100x100x100-int8.npy
  96. BIN  libs/npyjs/test/data/4x4x4x4x4-float16.npy
  97. BIN  libs/npyjs/test/data/4x4x4x4x4-float32.npy
  98. BIN  libs/npyjs/test/data/4x4x4x4x4-float64.npy
  99. BIN  libs/npyjs/test/data/4x4x4x4x4-int16.npy
  100. +0 -0  libs/npyjs/test/data/4x4x4x4x4-int64.npy

+ 9 - 0
libs/jszip-utils/CHANGES.md

@@ -0,0 +1,9 @@
+# v0.1.0 2019-05-12
+- Add support for `progress` callback in an `options` object.
+
+# v0.0.2 2014-05-19
+- Drop the code for xhr on `file://` and fix an issue with `file://` to `http://` requests, see [#3](https://github.com/Stuk/jszip-utils/pull/3).
+
+# v0.0.1 2014-04-27
+ - First release.
+

+ 123 - 0
libs/jszip-utils/Gruntfile.js

@@ -0,0 +1,123 @@
+/*jshint node: true */
+"use strict";
+
+module.exports = function(grunt) {
+  // https://wiki.saucelabs.com/display/DOCS/Platform+Configurator
+  // A lot of the browsers seem to time out with Saucelabs' unit testing
+  // framework. Here are the browsers that work and give enough coverage for our
+  // needs.
+  var browsers = [
+    {browserName: "chrome"},
+    {browserName: "firefox", platform: "Linux"},
+    {browserName: "internet explorer"}
+  ];
+
+  var tags = [];
+  if (process.env.TRAVIS_PULL_REQUEST && process.env.TRAVIS_PULL_REQUEST != "false") {
+    tags.push("pr" + process.env.TRAVIS_PULL_REQUEST);
+  } else if (process.env.TRAVIS_BRANCH) {
+    tags.push(process.env.TRAVIS_BRANCH);
+  }
+
+  var postBundleWithLicense = function(err, src, done) {
+    if (!err) {
+      // add the license
+      var license = require('fs').readFileSync('lib/license_header.js');
+      done(err, license + src);
+    } else {
+      done(err);
+    }
+  };
+
+  grunt.initConfig({
+    connect: {
+      server: {
+        options: {
+          base: "",
+          port: 8080
+        }
+      }
+    },
+    'saucelabs-qunit': {
+      all: {
+        options: {
+          urls: ["http://127.0.0.1:8080/test/index.html?hidepassed"],
+          build: process.env.TRAVIS_JOB_ID,
+          testname: "qunit tests",
+          tags: tags,
+          // Tests have statusCheckAttempts * pollInterval seconds to complete
+          pollInterval: 2000,
+          statusCheckAttempts: 15,
+          "max-duration": 30,
+          browsers: browsers,
+          maxRetries: 2
+        }
+      }
+    },
+    jshint: {
+      options: {
+        jshintrc: "./.jshintrc"
+      },
+      all: ['./lib/*.js', "Gruntfile.js"]
+    },
+    browserify: {
+      "utils": {
+        files: {
+          'dist/jszip-utils.js': ['lib/index.js']
+        },
+        options: {
+          standalone: 'JSZipUtils',
+          postBundleCB: postBundleWithLicense
+        }
+      },
+      "utils-ie": {
+        files: {
+          'dist/jszip-utils-ie.js': ['lib/index_IE.js']
+        },
+        options: {
+          postBundleCB: postBundleWithLicense
+        }
+      }
+    },
+    uglify: {
+      options: {
+        report: 'gzip',
+        mangle: true,
+        output: {
+          comments: /^!/
+        }
+      },
+      "jszip-utils": {
+        src: 'dist/jszip-utils.js',
+        dest: 'dist/jszip-utils.min.js'
+      },
+      "jszip-utils-ie": {
+        src: 'dist/jszip-utils-ie.js',
+        dest: 'dist/jszip-utils-ie.min.js'
+      }
+    }
+  });
+
+  grunt.loadNpmTasks("grunt-saucelabs");
+  grunt.loadNpmTasks("grunt-contrib-connect");
+  grunt.loadNpmTasks('grunt-browserify');
+  grunt.loadNpmTasks('grunt-contrib-jshint');
+  grunt.loadNpmTasks('grunt-contrib-uglify');
+
+  // A task to cause Grunt to sit and wait, keeping the test server running
+  grunt.registerTask("wait", function() {
+    this.async();
+  });
+
+  grunt.registerTask("test-local", ["build", "connect", "wait"]);
+  grunt.registerTask("test-remote", ["build", "connect", "saucelabs-qunit"]);
+
+  if (process.env.SAUCE_USERNAME && process.env.SAUCE_ACCESS_KEY) {
+    grunt.registerTask("test", ["jshint", "test-remote"]);
+  } else {
+    grunt.registerTask("test", ["jshint", "test-local"]);
+  }
+
+  grunt.registerTask("build", ["browserify", "uglify"]);
+  grunt.registerTask("default", ["jshint", "build"]);
+};

+ 651 - 0
libs/jszip-utils/LICENSE.markdown

@@ -0,0 +1,651 @@
+JSZipUtils is dual licensed. You may use it under the MIT license *or* the GPLv3
+license.
+
+The MIT License
+===============
+
+Copyright (c) 2014 Stuart Knightley, David Duponchel
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+
+GPL version 3
+=============
+
+                    GNU GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+                            Preamble
+
+  The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+  The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works.  By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.  We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors.  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+  To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights.  Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+  For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received.  You must make sure that they, too, receive
+or can get the source code.  And you must show them these terms so they
+know their rights.
+
+  Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+  For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software.  For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+  Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so.  This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software.  The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable.  Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products.  If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+  Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary.  To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+                       TERMS AND CONDITIONS
+
+  0. Definitions.
+
+  "This License" refers to version 3 of the GNU General Public License.
+
+  "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+  "The Program" refers to any copyrightable work licensed under this
+License.  Each licensee is addressed as "you".  "Licensees" and
+"recipients" may be individuals or organizations.
+
+  To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy.  The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+  A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+  To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy.  Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+  To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies.  Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+  An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License.  If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+  1. Source Code.
+
+  The "source code" for a work means the preferred form of the work
+for making modifications to it.  "Object code" means any non-source
+form of a work.
+
+  A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+  The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form.  A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+  The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities.  However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work.  For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+  The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+  The Corresponding Source for a work in source code form is that
+same work.
+
+  2. Basic Permissions.
+
+  All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met.  This License explicitly affirms your unlimited
+permission to run the unmodified Program.  The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work.  This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+  You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force.  You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright.  Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+  Conveying under any other circumstances is permitted solely under
+the conditions stated below.  Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+  3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+  No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+  When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+  4. Conveying Verbatim Copies.
+
+  You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+  You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+  5. Conveying Modified Source Versions.
+
+  You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+    a) The work must carry prominent notices stating that you modified
+    it, and giving a relevant date.
+
+    b) The work must carry prominent notices stating that it is
+    released under this License and any conditions added under section
+    7.  This requirement modifies the requirement in section 4 to
+    "keep intact all notices".
+
+    c) You must license the entire work, as a whole, under this
+    License to anyone who comes into possession of a copy.  This
+    License will therefore apply, along with any applicable section 7
+    additional terms, to the whole of the work, and all its parts,
+    regardless of how they are packaged.  This License gives no
+    permission to license the work in any other way, but it does not
+    invalidate such permission if you have separately received it.
+
+    d) If the work has interactive user interfaces, each must display
+    Appropriate Legal Notices; however, if the Program has interactive
+    interfaces that do not display Appropriate Legal Notices, your
+    work need not make them do so.
+
+  A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit.  Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+  6. Conveying Non-Source Forms.
+
+  You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+    a) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by the
+    Corresponding Source fixed on a durable physical medium
+    customarily used for software interchange.
+
+    b) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by a
+    written offer, valid for at least three years and valid for as
+    long as you offer spare parts or customer support for that product
+    model, to give anyone who possesses the object code either (1) a
+    copy of the Corresponding Source for all the software in the
+    product that is covered by this License, on a durable physical
+    medium customarily used for software interchange, for a price no
+    more than your reasonable cost of physically performing this
+    conveying of source, or (2) access to copy the
+    Corresponding Source from a network server at no charge.
+
+    c) Convey individual copies of the object code with a copy of the
+    written offer to provide the Corresponding Source.  This
+    alternative is allowed only occasionally and noncommercially, and
+    only if you received the object code with such an offer, in accord
+    with subsection 6b.
+
+    d) Convey the object code by offering access from a designated
+    place (gratis or for a charge), and offer equivalent access to the
+    Corresponding Source in the same way through the same place at no
+    further charge.  You need not require recipients to copy the
+    Corresponding Source along with the object code.  If the place to
+    copy the object code is a network server, the Corresponding Source
+    may be on a different server (operated by you or a third party)
+    that supports equivalent copying facilities, provided you maintain
+    clear directions next to the object code saying where to find the
+    Corresponding Source.  Regardless of what server hosts the
+    Corresponding Source, you remain obligated to ensure that it is
+    available for as long as needed to satisfy these requirements.
+
+    e) Convey the object code using peer-to-peer transmission, provided
+    you inform other peers where the object code and Corresponding
+    Source of the work are being offered to the general public at no
+    charge under subsection 6d.
+
+  A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+  A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling.  In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage.  For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product.  A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+  "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source.  The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+  If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information.  But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+  The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed.  Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+  Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+  7. Additional Terms.
+
+  "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law.  If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+  When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it.  (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.)  You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+  Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+    a) Disclaiming warranty or limiting liability differently from the
+    terms of sections 15 and 16 of this License; or
+
+    b) Requiring preservation of specified reasonable legal notices or
+    author attributions in that material or in the Appropriate Legal
+    Notices displayed by works containing it; or
+
+    c) Prohibiting misrepresentation of the origin of that material, or
+    requiring that modified versions of such material be marked in
+    reasonable ways as different from the original version; or
+
+    d) Limiting the use for publicity purposes of names of licensors or
+    authors of the material; or
+
+    e) Declining to grant rights under trademark law for use of some
+    trade names, trademarks, or service marks; or
+
+    f) Requiring indemnification of licensors and authors of that
+    material by anyone who conveys the material (or modified versions of
+    it) with contractual assumptions of liability to the recipient, for
+    any liability that these contractual assumptions directly impose on
+    those licensors and authors.
+
+  All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10.  If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term.  If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+  If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+  Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+  8. Termination.
+
+  You may not propagate or modify a covered work except as expressly
+provided under this License.  Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+  However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+  Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+  Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License.  If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+  9. Acceptance Not Required for Having Copies.
+
+  You are not required to accept this License in order to receive or
+run a copy of the Program.  Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance.  However,
+nothing other than this License grants you permission to propagate or
+modify any covered work.  These actions infringe copyright if you do
+not accept this License.  Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+  10. Automatic Licensing of Downstream Recipients.
+
+  Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License.  You are not responsible
+for enforcing compliance by third parties with this License.
+
+  An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations.  If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+  You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License.  For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+  11. Patents.
+
+  A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based.  The
+work thus licensed is called the contributor's "contributor version".
+
+  A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version.  For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+  Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+  In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement).  To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+  If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients.  "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+  If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+  A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License.  You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+  Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+  12. No Surrender of Others' Freedom.
+
+  If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all.  For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+  13. Use with the GNU Affero General Public License.
+
+  Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work.  The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+  14. Revised Versions of this License.
+
+  The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time.  Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+  Each version is given a distinguishing version number.  If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation.  If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+  If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+  Later license versions may give you additional or different
+permissions.  However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+  15. Disclaimer of Warranty.
+
+  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+  16. Limitation of Liability.
+
+  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+  17. Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS

+ 30 - 0
libs/jszip-utils/README.markdown

@@ -0,0 +1,30 @@
+JSZipUtils
+==========
+
+A collection of cross-browser utilities to go along with JSZip, see
+http://stuk.github.io/jszip-utils for all the documentation.
+
+It has two parts, one for every browser and one for IE < 10. To use it:
+
+```html
+<script type="text/javascript" src="dist/jszip-utils.js"></script>
+<!--
+Mandatory in IE 6, 7, 8 and 9.
+-->
+<!--[if IE]>
+<script type="text/javascript" src="dist/jszip-utils-ie.js"></script>
+<![endif]-->
+```
+
+Development
+-----------
+
+Run `npm test` to lint, build, and launch a server at http://localhost:8080/test/ . Open the page in a browser to verify that the tests are passing.
+
+If you have a Saucelabs account, set the `SAUCE_USERNAME` and `SAUCE_ACCESS_KEY` environment variables to test remotely.
+
+License
+-------
+
+JSZipUtils is dual-licensed. You may use it under the MIT license *or* the GPLv3
+license. See LICENSE.markdown.
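
For orientation, the sketch below shows how the two pieces added in this commit are typically wired together: JSZipUtils fetches the raw bytes and JSZip opens the archive. It is illustrative only; the path and entry name are placeholders, and it assumes the bundled JSZip is a 3.x build (its `lib/` layout and `index.d.ts` suggest so), which exposes `JSZip.loadAsync` rather than the older `new JSZip(data)` constructor used in the examples further down.

```js
// Illustrative sketch only; "assets/archive.zip" and "notes.txt" are placeholder names.
// Assumes a JSZip 3.x build, which provides JSZip.loadAsync().
JSZipUtils.getBinaryContent("assets/archive.zip", function (err, data) {
    if (err) {
        throw err; // or handle the error
    }
    JSZip.loadAsync(data)
        .then(function (zip) {
            // read one entry of the archive as text
            return zip.file("notes.txt").async("string");
        })
        .then(function (text) {
            console.log(text);
        });
});
```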

+ 25 - 0
libs/jszip-utils/_config.yml

@@ -0,0 +1,25 @@
+# will be overwritten by github, see https://help.github.com/articles/using-jekyll-with-pages
+safe: true
+lsi: false
+pygments: true
+source: ./
+# /overwritten
+
+baseurl: /jszip-utils
+
+layouts: ./documentation/_layouts
+permalink: none
+exclude: ['bin', 'README.md', 'node_modules']
+
+markdown: redcarpet
+redcarpet:
+    extensions: [
+        'no_intra_emphasis',
+        'fenced_code_blocks',
+        'autolink',
+        'strikethrough',
+        'superscript',
+        'with_toc_data',
+        'tables',
+        'hardwrap'
+    ]

+ 26 - 0
libs/jszip-utils/bower.json

@@ -0,0 +1,26 @@
+{
+  "name": "jszip-utils",
+  "version": "0.1.0",
+  "homepage": "https://github.com/Stuk/jszip-utils",
+  "authors": [
+    "Stuart Knightley <stuart@stuartk.com>",
+    "David Duponchel <d.duponchel@gmail.com>"
+  ],
+  "description": "A collection of cross-browser utilities to go along with JSZip.",
+  "main": "dist/jszip-utils.js",
+  "keywords": [
+    "JSZip",
+    "ajax",
+    "cross browser",
+    "IE",
+    "Internet Explorer"
+  ],
+  "license": "MIT or GPLv3",
+  "ignore": [
+    "**/.*",
+    "node_modules",
+    "bower_components",
+    "test",
+    "tests"
+  ]
+}

+ 18 - 0
libs/jszip-utils/component.json

@@ -0,0 +1,18 @@
+{
+  "name": "jszip-utils",
+  "repo": "Stuk/jszip-utils",
+  "description": "A collection of cross-browser utilities to go along with JSZip.",
+  "version": "0.1.0",
+  "keywords": [
+    "JSZip",
+    "ajax",
+    "cross browser",
+    "IE",
+    "Internet Explorer"
+  ],
+  "main": "dist/jszip-utils.js",
+  "license": "MIT or GPLv3",
+  "scripts": [
+    "dist/jszip-utils.js"
+  ]
+}

+ 56 - 0
libs/jszip-utils/dist/jszip-utils-ie.js

@@ -0,0 +1,56 @@
+/*@preserve
+
+JSZipUtils - A collection of cross-browser utilities to go along with JSZip.
+<http://stuk.github.io/jszip-utils>
+
+(c) 2014-2019 Stuart Knightley, David Duponchel
+Dual licenced under the MIT license or GPLv3. See https://raw.github.com/Stuk/jszip-utils/master/LICENSE.markdown.
+
+*/
+;(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);throw new Error("Cannot find module '"+o+"'")}var f=n[o]={exports:{}};t[o][0].call(f.exports,function(e){var n=t[o][1][e];return s(n?n:e)},f,f.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
+var global=typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {};/* jshint evil: true, newcap: false */
+/* global IEBinaryToArray_ByteStr, IEBinaryToArray_ByteStr_Last */
+"use strict";
+
+// Adapted from http://stackoverflow.com/questions/1095102/how-do-i-load-binary-image-data-using-javascript-and-xmlhttprequest
+var IEBinaryToArray_ByteStr_Script =
+    "<!-- IEBinaryToArray_ByteStr -->\r\n"+
+    "<script type='text/vbscript'>\r\n"+
+    "Function IEBinaryToArray_ByteStr(Binary)\r\n"+
+    "   IEBinaryToArray_ByteStr = CStr(Binary)\r\n"+
+    "End Function\r\n"+
+    "Function IEBinaryToArray_ByteStr_Last(Binary)\r\n"+
+    "   Dim lastIndex\r\n"+
+    "   lastIndex = LenB(Binary)\r\n"+
+    "   if lastIndex mod 2 Then\r\n"+
+    "       IEBinaryToArray_ByteStr_Last = Chr( AscB( MidB( Binary, lastIndex, 1 ) ) )\r\n"+
+    "   Else\r\n"+
+    "       IEBinaryToArray_ByteStr_Last = "+'""'+"\r\n"+
+    "   End If\r\n"+
+    "End Function\r\n"+
+    "</script>\r\n";
+
+// inject VBScript
+document.write(IEBinaryToArray_ByteStr_Script);
+
+global.JSZipUtils._getBinaryFromXHR = function (xhr) {
+    var binary = xhr.responseBody;
+    var byteMapping = {};
+    for ( var i = 0; i < 256; i++ ) {
+        for ( var j = 0; j < 256; j++ ) {
+            byteMapping[ String.fromCharCode( i + (j << 8) ) ] =
+                String.fromCharCode(i) + String.fromCharCode(j);
+        }
+    }
+    var rawBytes = IEBinaryToArray_ByteStr(binary);
+    var lastChr = IEBinaryToArray_ByteStr_Last(binary);
+    return rawBytes.replace(/[\s\S]/g, function( match ) {
+        return byteMapping[match];
+    }) + lastChr;
+};
+
+// enforcing Stuk's coding style
+// vim: set shiftwidth=4 softtabstop=4:
+
+},{}]},{},[1])
+;

+ 1 - 0
libs/jszip-utils/dist/jszip-utils-ie.min.js

File diff suppressed because it is too large

+ 160 - 0
libs/jszip-utils/dist/jszip-utils.js

File diff suppressed because it is too large

+ 1 - 0
libs/jszip-utils/dist/jszip-utils.min.js

File diff suppressed because it is too large

+ 86 - 0
libs/jszip-utils/documentation/_layouts/default.html

@@ -0,0 +1,86 @@
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <meta charset="utf-8">
+    <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
+    <meta name="viewport" content="width=device-width, initial-scale=1">
+    <meta name="description" content="A collection of cross-browser utilities to go along with JSZip." />
+    <title>{{page.title}}</title>
+
+    <!-- Latest compiled and minified CSS -->
+    <link rel="stylesheet" href="//netdna.bootstrapcdn.com/bootstrap/3.1.1/css/bootstrap.min.css">
+
+    <!-- Optional theme -->
+    <link rel="stylesheet" href="//netdna.bootstrapcdn.com/bootstrap/3.1.1/css/bootstrap-theme.min.css">
+
+    <!-- Latest compiled and minified JavaScript -->
+    <!-- <script src="//netdna.bootstrapcdn.com/bootstrap/3.1.1/js/bootstrap.min.js"></script> -->
+
+    <link rel="stylesheet" href="{{site.baseurl}}/documentation/css/pygments.css">
+    <link rel="stylesheet" href="{{site.baseurl}}/documentation/css/main.css">
+
+    <script type="text/javascript" src="{{site.baseurl}}/dist/jszip-utils.js"></script>
+    <!--
+    Mandatory in IE 6, 7, 8 and 9.
+    -->
+    <!--[if IE]>
+    <script type="text/javascript" src="{{site.baseurl}}/dist/jszip-utils-ie.js"></script>
+    <![endif]-->
+  </head>
+  <body>
+    <div class="container">
+      <div class="navbar navbar-default" role="navigation">
+        <div class="container-fluid">
+          <div class="navbar-header">
+            <a class="navbar-brand" href="{{site.baseurl}}/"><strong>JS</strong>ZipUtils</a>
+          </div>
+          <ul class="nav navbar-nav">
+            <li {% if page.section == "api" %}class="active"{% endif %}>
+              <a href="{{site.baseurl}}/documentation/api.html">API</a>
+            </li>
+          </ul>
+          <ul class="nav navbar-nav navbar-right">
+            <li>
+              <a href="https://github.com/Stuk/jszip-utils">current version : <strong>v0.1.0</strong></a>
+            </li>
+          </ul>
+        </div>
+      </div>
+      <div class="row">
+        <nav class="{% if page.section %}col-md-3{% endif %}">
+        {% if page.section == "api" %}
+        <!-- <h4>Documentation</h4> -->
+        <ul class="nav">
+          <li><a href="{{site.baseurl}}/documentation/api.html">JSZipUtils</a>
+            <ul>
+              <li><a href="{{site.baseurl}}/documentation/api/getbinarycontent.html">getBinaryContent(path, callback)</a></li>
+            </ul>
+          </li>
+        </ul>
+        {% endif %}
+        </nav>
+        <div class="{% if page.section %}col-md-9{% else %}col-md-12{% endif %}">
+          <h1>{{page.title}}</h1>
+          <!-- ===================== -->
+          <!-- === C O N T E N T === -->
+          <!-- ===================== -->
+
+          {{content}}
+
+          <!-- ===================== -->
+          <!-- == / C O N T E N T == -->
+          <!-- ===================== -->
+        </div>
+      </div>
+    </div>
+    <script>
+      // FIXME find how to do that cleanly
+      (function(){
+        var tables = document.getElementsByTagName("table");
+        for(var i = 0; i < tables.length; i++) {
+          tables[i].className = "table table-condensed table-striped table-bordered";
+        }
+      })();
+    </script>
+  </body>
+</html>
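
The `FIXME` in the inline script above could arguably be tightened with `querySelectorAll` and `classList`; a minimal sketch, assuming only browsers with `classList` support (so not the old IE versions this site otherwise caters to):

```js
// Sketch of a cleaner version of the table-styling hack above (modern browsers only).
(function () {
    var tables = document.querySelectorAll("table");
    for (var i = 0; i < tables.length; i++) {
        // add the Bootstrap classes one by one for broader classList compatibility
        tables[i].classList.add("table");
        tables[i].classList.add("table-condensed");
        tables[i].classList.add("table-striped");
        tables[i].classList.add("table-bordered");
    }
})();
```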

+ 6 - 0
libs/jszip-utils/documentation/api.md

@@ -0,0 +1,6 @@
+---
+title: "JSZipUtils"
+layout: default
+section: api
+---
+This section contains the documentation of the different functions.

+ 85 - 0
libs/jszip-utils/documentation/api/getbinarycontent.md

@@ -0,0 +1,85 @@
+---
+title: "getBinaryContent(path, callback)"
+layout: default
+section: api
+---
+
+__Description__ : Use an AJAX call to fetch a file (HTTP GET) on the server
+that served the file. Cross-domain requests will work if the browser supports
+[them](http://caniuse.com/cors), but only if the server sends the
+[right headers](https://developer.mozilla.org/en-US/docs/HTTP/Access_control_CORS).
+This function doesn't follow redirects: currently only `200 OK` responses are accepted.
+
+__Arguments__
+
+name     | type               | description
+---------|--------------------|------------
+path     | String             | the path to the resource to GET.
+options  | function or object | A callback function or options object.
+
+The options object has a required `callback` function property and an optional `progress` function property.
+
+The `callback` function has the following signature: `function (err, data) {...}` :
+
+name | type               | description
+-----|--------------------|------------
+err  | Error              | the error, if any.
+data | ArrayBuffer/String | the data in a format suitable for JSZip.
+
+The `progress` function has the following signature: `function (event) {...}`, where `event` has the following properties:
+
+name    | type               | description
+--------|--------------------|------------
+path    | string             | The path of the file being loaded.
+loaded  | number             | the amount of data currently transferred.
+total   | number             | the total amount of data to be transferred.
+percent | number             | the percent of data currently transferred.
+
+The data can be parsed by [JSZip#load](http://stuk.github.io/jszip/#doc_load_data_options)
+or used with [JSZip#file](http://stuk.github.io/jszip/#doc_file_name_data_options)
+to add a new file. With `JSZip#file` use `{binary:true}` as options.
+
+__Returns__ : Nothing.
+
+__Throws__ : Nothing.
+
+<!--
+__Complexity__ : **O(1)** everywhere but on IE <=9, **O(n)** on IE <=9, n being
+the length of the fetched data.
+-->
+
+__Example__
+
+```js
+// loading a zip file
+JSZipUtils.getBinaryContent("path/to/file.zip", function (err, data) {
+   if(err) {
+      throw err; // or handle the error
+   }
+   var zip = new JSZip(data);
+});
+
+// loading a file and adding it to a zip file
+JSZipUtils.getBinaryContent("path/to/picture.png", function (err, data) {
+   if(err) {
+      throw err; // or handle the error
+   }
+   var zip = new JSZip();
+   zip.file("picture.png", data, {binary:true});
+});
+
+// loading a zip file with a progress callback
+JSZipUtils.getBinaryContent("path/to/file.zip", {
+    progress: function (event) {
+        console.log(event.percent + "% of " + event.path+ " loaded")
+    },
+    callback: function (err, data) {
+        if(err) {
+           throw err; // or handle the error
+        }
+        var zip = new JSZip(data);
+    }
+});
+```
+
+

+ 7 - 0
libs/jszip-utils/documentation/css/main.css

@@ -0,0 +1,7 @@
+ul.nav ul {
+  list-style:none;
+  margin: 0;
+  padding: 0 0 0 25px;
+}
+
+

+ 64 - 0
libs/jszip-utils/documentation/css/pygments.css

@@ -0,0 +1,64 @@
+/* Generated with :
+ * pygmentize -S default -f html > pygments.css
+ */
+.hll { background-color: #ffffcc }
+.c { color: #408080; font-style: italic } /* Comment */
+.err { border: 1px solid #FF0000 } /* Error */
+.k { color: #008000; font-weight: bold } /* Keyword */
+.o { color: #666666 } /* Operator */
+.cm { color: #408080; font-style: italic } /* Comment.Multiline */
+.cp { color: #BC7A00 } /* Comment.Preproc */
+.c1 { color: #408080; font-style: italic } /* Comment.Single */
+.cs { color: #408080; font-style: italic } /* Comment.Special */
+.gd { color: #A00000 } /* Generic.Deleted */
+.ge { font-style: italic } /* Generic.Emph */
+.gr { color: #FF0000 } /* Generic.Error */
+.gh { color: #000080; font-weight: bold } /* Generic.Heading */
+.gi { color: #00A000 } /* Generic.Inserted */
+.go { color: #888888 } /* Generic.Output */
+.gp { color: #000080; font-weight: bold } /* Generic.Prompt */
+.gs { font-weight: bold } /* Generic.Strong */
+.gu { color: #800080; font-weight: bold } /* Generic.Subheading */
+.gt { color: #0044DD } /* Generic.Traceback */
+.kc { color: #008000; font-weight: bold } /* Keyword.Constant */
+.kd { color: #008000; font-weight: bold } /* Keyword.Declaration */
+.kn { color: #008000; font-weight: bold } /* Keyword.Namespace */
+.kp { color: #008000 } /* Keyword.Pseudo */
+.kr { color: #008000; font-weight: bold } /* Keyword.Reserved */
+.kt { color: #B00040 } /* Keyword.Type */
+.m { color: #666666 } /* Literal.Number */
+.s { color: #BA2121 } /* Literal.String */
+.na { color: #7D9029 } /* Name.Attribute */
+.nb { color: #008000 } /* Name.Builtin */
+.nc { color: #0000FF; font-weight: bold } /* Name.Class */
+.no { color: #880000 } /* Name.Constant */
+.nd { color: #AA22FF } /* Name.Decorator */
+.ni { color: #999999; font-weight: bold } /* Name.Entity */
+.ne { color: #D2413A; font-weight: bold } /* Name.Exception */
+.nf { color: #0000FF } /* Name.Function */
+.nl { color: #A0A000 } /* Name.Label */
+.nn { color: #0000FF; font-weight: bold } /* Name.Namespace */
+.nt { color: #008000; font-weight: bold } /* Name.Tag */
+.nv { color: #19177C } /* Name.Variable */
+.ow { color: #AA22FF; font-weight: bold } /* Operator.Word */
+.w { color: #bbbbbb } /* Text.Whitespace */
+.mf { color: #666666 } /* Literal.Number.Float */
+.mh { color: #666666 } /* Literal.Number.Hex */
+.mi { color: #666666 } /* Literal.Number.Integer */
+.mo { color: #666666 } /* Literal.Number.Oct */
+.sb { color: #BA2121 } /* Literal.String.Backtick */
+.sc { color: #BA2121 } /* Literal.String.Char */
+.sd { color: #BA2121; font-style: italic } /* Literal.String.Doc */
+.s2 { color: #BA2121 } /* Literal.String.Double */
+.se { color: #BB6622; font-weight: bold } /* Literal.String.Escape */
+.sh { color: #BA2121 } /* Literal.String.Heredoc */
+.si { color: #BB6688; font-weight: bold } /* Literal.String.Interpol */
+.sx { color: #008000 } /* Literal.String.Other */
+.sr { color: #BB6688 } /* Literal.String.Regex */
+.s1 { color: #BA2121 } /* Literal.String.Single */
+.ss { color: #19177C } /* Literal.String.Symbol */
+.bp { color: #008000 } /* Name.Builtin.Pseudo */
+.vc { color: #19177C } /* Name.Variable.Class */
+.vg { color: #19177C } /* Name.Variable.Global */
+.vi { color: #19177C } /* Name.Variable.Instance */
+.il { color: #666666 } /* Literal.Number.Integer.Long */

+ 25 - 0
libs/jszip-utils/index.md

@@ -0,0 +1,25 @@
+---
+title: JSZipUtils
+layout: default
+---
+
+A collection of cross-browser utilities to go along with JSZip; see
+http://stuk.github.io/jszip-utils for the full documentation.
+
+It has two parts: one for all browsers and one for IE < 10. To use it:
+
+```html
+<script type="text/javascript" src="dist/jszip-utils.js"></script>
+<!--
+Mandatory in IE 6, 7, 8 and 9.
+-->
+<!--[if IE]>
+<script type="text/javascript" src="dist/jszip-utils-ie.js"></script>
+<![endif]-->
+```
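+
+Once the scripts are included, `JSZipUtils.getBinaryContent` fetches binary content in a
+JSZip-friendly format. A minimal sketch, assuming JSZip is also loaded on the page:
+
+```js
+JSZipUtils.getBinaryContent("path/to/file.zip", function (err, data) {
+    if (err) {
+        throw err; // or handle the error
+    }
+    var zip = new JSZip(data);
+});
+```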
+
+License
+-------
+
+JSZipUtils is dual-licensed. You may use it under the MIT license *or* the GPLv3
+license. See LICENSE.markdown.

+ 145 - 0
libs/jszip-utils/lib/index.js

@@ -0,0 +1,145 @@
+'use strict';
+/*globals Promise */
+
+var JSZipUtils = {};
+// just use the responseText with XHR1, the response with XHR2.
+// The transformation doesn't throw away the high-order byte (with responseText)
+// because JSZip handles that case. If not used with JSZip, you may need to
+// do it yourself, see https://developer.mozilla.org/En/Using_XMLHttpRequest#Handling_binary_data
+JSZipUtils._getBinaryFromXHR = function (xhr) {
+    // for xhr.responseText, the 0xFF mask is applied by JSZip
+    return xhr.response || xhr.responseText;
+};
+
+// taken from jQuery
+function createStandardXHR() {
+    try {
+        return new window.XMLHttpRequest();
+    } catch( e ) {}
+}
+
+function createActiveXHR() {
+    try {
+        return new window.ActiveXObject("Microsoft.XMLHTTP");
+    } catch( e ) {}
+}
+
+// Create the request object
+var createXHR = (typeof window !== "undefined" && window.ActiveXObject) ?
+    /* Microsoft failed to properly
+     * implement the XMLHttpRequest in IE7 (it can't request local files),
+     * so we use the ActiveXObject when it is available.
+     * Additionally, XMLHttpRequest can be disabled in IE7/IE8, so
+     * we need a fallback.
+     */
+    function() {
+    return createStandardXHR() || createActiveXHR();
+} :
+    // For all other browsers, use the standard XMLHttpRequest object
+    createStandardXHR;
+
+
+/**
+ * @param  {string} path    The path to the resource to GET.
+ * @param  {function|{callback: function, progress: function}} options
+ * @return {Promise|undefined} If no callback is passed then a promise is returned
+ */
+JSZipUtils.getBinaryContent = function (path, options) {
+    var promise, resolve, reject;
+    var callback;
+
+    if (!options) {
+        options = {};
+    }
+
+    // backward compatible callback
+    if (typeof options === "function") {
+        callback = options;
+        options = {};
+    } else if (typeof options.callback === 'function') {
+        // callback inside options object
+        callback = options.callback;
+    }
+
+    if (!callback && typeof Promise !== "undefined") {
+        promise = new Promise(function (_resolve, _reject) {
+            resolve = _resolve;
+            reject = _reject;
+        });
+    } else {
+        resolve = function (data) { callback(null, data); };
+        reject = function (err) { callback(err, null); };
+    }
+
+    /*
+     * Here is the tricky part : getting the data.
+     * In firefox/chrome/opera/... setting the mimeType to 'text/plain; charset=x-user-defined'
+     * is enough, the result is in the standard xhr.responseText.
+     * cf https://developer.mozilla.org/En/XMLHttpRequest/Using_XMLHttpRequest#Receiving_binary_data_in_older_browsers
+     * In IE <= 9, we must use (the IE only) attribute responseBody
+     * (for binary data, its content is different from responseText).
+     * In IE 10, the 'charset=x-user-defined' trick doesn't work, only the
+     * responseType will work :
+     * http://msdn.microsoft.com/en-us/library/ie/hh673569%28v=vs.85%29.aspx#Binary_Object_upload_and_download
+     *
+     * I'd like to use jQuery to avoid this XHR madness, but it doesn't support
+     * the responseType attribute : http://bugs.jquery.com/ticket/11461
+     */
+    try {
+        var xhr = createXHR();
+
+        xhr.open('GET', path, true);
+
+        // recent browsers
+        if ("responseType" in xhr) {
+            xhr.responseType = "arraybuffer";
+        }
+
+        // older browser
+        if(xhr.overrideMimeType) {
+            xhr.overrideMimeType("text/plain; charset=x-user-defined");
+        }
+
+        xhr.onreadystatechange = function (event) {
+            // use `xhr` and not `this`... thanks IE
+            if (xhr.readyState === 4) {
+                if (xhr.status === 200 || xhr.status === 0) {
+                    try {
+                        resolve(JSZipUtils._getBinaryFromXHR(xhr));
+                    } catch(err) {
+                        reject(new Error(err));
+                    }
+                } else {
+                    reject(new Error("Ajax error for " + path + " : " + this.status + " " + this.statusText));
+                }
+            }
+        };
+
+        if(options.progress) {
+            xhr.onprogress = function(e) {
+                options.progress({
+                    path: path,
+                    originalEvent: e,
+                    percent: e.loaded / e.total * 100,
+                    loaded: e.loaded,
+                    total: e.total
+                });
+            };
+        }
+
+        xhr.send();
+
+    } catch (e) {
+        reject(new Error(e));
+    }
+
+    // returns a promise or undefined depending on whether a callback was
+    // provided
+    return promise;
+};
+
+// export
+module.exports = JSZipUtils;
+
+// enforcing Stuk's coding style
+// vim: set shiftwidth=4 softtabstop=4:

+ 43 - 0
libs/jszip-utils/lib/index_IE.js

@@ -0,0 +1,43 @@
+/* jshint evil: true, newcap: false */
+/* global IEBinaryToArray_ByteStr, IEBinaryToArray_ByteStr_Last */
+"use strict";
+
+// Adapted from http://stackoverflow.com/questions/1095102/how-do-i-load-binary-image-data-using-javascript-and-xmlhttprequest
+var IEBinaryToArray_ByteStr_Script =
+    "<!-- IEBinaryToArray_ByteStr -->\r\n"+
+    "<script type='text/vbscript'>\r\n"+
+    "Function IEBinaryToArray_ByteStr(Binary)\r\n"+
+    "   IEBinaryToArray_ByteStr = CStr(Binary)\r\n"+
+    "End Function\r\n"+
+    "Function IEBinaryToArray_ByteStr_Last(Binary)\r\n"+
+    "   Dim lastIndex\r\n"+
+    "   lastIndex = LenB(Binary)\r\n"+
+    "   if lastIndex mod 2 Then\r\n"+
+    "       IEBinaryToArray_ByteStr_Last = Chr( AscB( MidB( Binary, lastIndex, 1 ) ) )\r\n"+
+    "   Else\r\n"+
+    "       IEBinaryToArray_ByteStr_Last = "+'""'+"\r\n"+
+    "   End If\r\n"+
+    "End Function\r\n"+
+    "</script>\r\n";
+
+// inject VBScript
+document.write(IEBinaryToArray_ByteStr_Script);
+
+global.JSZipUtils._getBinaryFromXHR = function (xhr) {
+    var binary = xhr.responseBody;
+    var byteMapping = {};
+    for ( var i = 0; i < 256; i++ ) {
+        for ( var j = 0; j < 256; j++ ) {
+            byteMapping[ String.fromCharCode( i + (j << 8) ) ] =
+                String.fromCharCode(i) + String.fromCharCode(j);
+        }
+    }
+    var rawBytes = IEBinaryToArray_ByteStr(binary);
+    var lastChr = IEBinaryToArray_ByteStr_Last(binary);
+    return rawBytes.replace(/[\s\S]/g, function( match ) {
+        return byteMapping[match];
+    }) + lastChr;
+};
+
+// enforcing Stuk's coding style
+// vim: set shiftwidth=4 softtabstop=4:

+ 9 - 0
libs/jszip-utils/lib/license_header.js

@@ -0,0 +1,9 @@
+/*@preserve
+
+JSZipUtils - A collection of cross-browser utilities to go along with JSZip.
+<http://stuk.github.io/jszip-utils>
+
+(c) 2014-2019 Stuart Knightley, David Duponchel
+Dual licensed under the MIT license or GPLv3. See https://raw.github.com/Stuk/jszip-utils/master/LICENSE.markdown.
+
+*/

+ 42 - 0
libs/jszip-utils/package.json

@@ -0,0 +1,42 @@
+{
+  "name": "jszip-utils",
+  "version": "0.1.0",
+  "author": "Stuart Knightley <stuart@stuartk.com>",
+  "description": "A collection of cross-browser utilities to go along with JSZip.",
+  "scripts": {
+    "test": "npm run test-browser",
+    "test-browser": "grunt build && grunt test",
+    "lint": "grunt jshint"
+  },
+  "main": "./lib/index",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/Stuk/jszip-utils.git"
+  },
+  "contributors": [
+    {
+      "name": "David Duponchel"
+    }
+  ],
+  "keywords": [
+    "JSZip",
+    "ajax",
+    "cross browser",
+    "IE",
+    "Internet Explorer"
+  ],
+  "devDependencies": {
+    "browserify": "~2.35.0",
+    "grunt": "~0.4.1",
+    "grunt-browserify": "~1.3.0",
+    "grunt-cli": "~0.1.9",
+    "grunt-contrib-connect": "~2.0.0",
+    "grunt-contrib-jshint": "~0.6.4",
+    "grunt-contrib-uglify": "~4.0.1",
+    "grunt-saucelabs": "Stuk/grunt-saucelabs#v10.0.0",
+    "jshint": "~2.1.11",
+    "qunit": "~2.9.2",
+    "qunitjs": "2.0.1"
+  },
+  "license": "(MIT OR GPL-3.0)"
+}

+ 55 - 0
libs/jszip-utils/test/index.html

@@ -0,0 +1,55 @@
+<!DOCTYPE html>
+<html>
+   <head>
+      <meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
+      <title>JSZip Testing</title>
+      <link media="screen" href="../node_modules/qunit/qunit/qunit.css" type="text/css" rel="stylesheet">
+      <script type="text/javascript" src="../node_modules/qunit/qunit/qunit.js"></script>
+
+      <script type="text/javascript">
+        // Exposes test results for Saucelabs. See
+        // https://wiki.saucelabs.com/display/DOCS/Setting+Up+JavaScript+Unit+Testing+Frameworks
+        (function () {
+          var log = [];
+          var testName;
+
+          QUnit.done(function (test_results) {
+            var tests = [];
+            for(var i = 0, len = log.length; i < len; i++) {
+              var details = log[i];
+              tests.push({
+                name: details.name,
+                result: details.result,
+                message: details.message,
+                duration: details.runtime
+              });
+            }
+            test_results.tests = tests;
+            test_results.duration = test_results.runtime;
+
+            window.global_test_results = test_results;
+          });
+          QUnit.log(function(details){
+              log.push(details);
+          });
+        })()
+      </script>
+
+      <script type="text/javascript" src="../dist/jszip-utils.js"></script>
+      <!--
+      Mandatory in IE 6, 7, 8 and 9.
+      -->
+      <!--[if IE]>
+      <script type="text/javascript" src="../dist/jszip-utils-ie.js"></script>
+      <![endif]-->
+
+      <script type="text/javascript" src="test.js"></script>
+
+   </head>
+   <body>
+      <div id="qunit"></div>
+      <div id="qunit-fixture"></div>
+   </body>
+</html>
+<!-- enforcing Stuk's coding style -->
+<!-- vim: set shiftwidth=4 softtabstop=4 foldmethod=marker: -->

+ 1 - 0
libs/jszip-utils/test/ref/amount.txt

@@ -0,0 +1 @@
+€15

BIN
libs/jszip-utils/test/ref/smile.gif


+ 226 - 0
libs/jszip-utils/test/test.js

@@ -0,0 +1,226 @@
+'use strict';
+
+/**
+ * This is a helper function to transform the input into a binary string.
+ * The transformation is normally handled by JSZip.
+ * @param {String|ArrayBuffer} input the input to convert.
+ * @return {String} the binary string.
+ */
+function toString(input) {
+    var result = "",
+        i, len, isArray = (typeof input !== "string");
+
+    if (isArray) {
+        input = new Uint8Array(input);
+    }
+
+    for (i = 0, len = input.length; i < len; i++) {
+        result += String.fromCharCode(
+            (isArray ? input[i] : input.charCodeAt(i)) % 0xFF
+        );
+    }
+
+    return result;
+}
+
+QUnit.module("callback");
+
+QUnit.test("JSZipUtils.getBinaryContent, text, 200 OK", function (assert) {
+    var done = assert.async();
+    var p = JSZipUtils.getBinaryContent("ref/amount.txt", function (err, data) {
+        assert.equal(err, null, "no error");
+        assert.equal(toString(data), "\xe2\x82\xac\x31\x35\x0a", "The content has been fetched");
+        done();
+    });
+    assert.strictEqual(p, undefined, 'not return promise');
+});
+
+QUnit.test("JSZipUtils.getBinaryContent, image, 200 OK", function (assert) {
+    var done = assert.async();
+    var p = JSZipUtils.getBinaryContent("ref/smile.gif", function (err, data) {
+        assert.equal(err, null, "no error");
+        assert.equal(toString(data).indexOf("\x47\x49\x46\x38\x37\x61"), 0, "The content has been fetched");
+        done();
+    });
+    assert.strictEqual(p, undefined, 'not return promise');
+});
+
+QUnit.test("JSZipUtils.getBinaryContent, 404 NOT FOUND", function (assert) {
+    var done = assert.async();
+    var p = JSZipUtils.getBinaryContent("ref/nothing", function (err, data) {
+        assert.equal(data, null, "no error");
+        assert.ok(err instanceof Error, "The error is an Error");
+        done();
+    });
+    assert.strictEqual(p, undefined, 'not return promise');
+});
+
+
+
+QUnit.module("options={callback}");
+
+QUnit.test("JSZipUtils.getBinaryContent, text, 200 OK", function (assert) {
+    var done = assert.async();
+    var p = JSZipUtils.getBinaryContent("ref/amount.txt", {
+        callback: function (err, data) {
+            assert.equal(err, null, "no error");
+            assert.equal(toString(data), "\xe2\x82\xac\x31\x35\x0a", "The content has been fetched");
+            done();
+        }
+    });
+    assert.strictEqual(p, undefined, 'not return promise');
+});
+
+QUnit.test("JSZipUtils.getBinaryContent, image, 200 OK", function (assert) {
+    var done = assert.async();
+    var p = JSZipUtils.getBinaryContent("ref/smile.gif", {
+        callback: function (err, data) {
+            assert.equal(err, null, "no error");
+            assert.equal(toString(data).indexOf("\x47\x49\x46\x38\x37\x61"), 0, "The content has been fetched");
+            done();
+        }
+    });
+    assert.strictEqual(p, undefined, 'not return promise');
+});
+
+QUnit.test("JSZipUtils.getBinaryContent, 404 NOT FOUND", function (assert) {
+    var done = assert.async();
+    var p = JSZipUtils.getBinaryContent("ref/nothing", {
+        callback: function (err, data) {
+            assert.equal(data, null, "no error");
+            assert.ok(err instanceof Error, "The error is an Error");
+            done();
+        }
+    });
+    assert.strictEqual(p, undefined, 'not return promise');
+});
+
+// Guard Promise tests for IE
+if (typeof Promise === "undefined") {
+    QUnit.module("Promises");
+    QUnit.skip("Skipping promise tests");
+} else {
+    QUnit.module("Promise (no parameters)");
+
+    QUnit.test("JSZipUtils.getBinaryContent amount, text, 200 OK", function (assert) {
+        var done = assert.async();
+        JSZipUtils.getBinaryContent("ref/amount.txt").then(function (data) {
+                assert.equal(toString(data), "\xe2\x82\xac\x31\x35\x0a", "The content has been fetched");
+                done();
+            })
+            .catch(function (err) {
+                assert.equal(err, null, "no error");
+                done();
+            });
+    });
+
+    QUnit.test("JSZipUtils.getBinaryContent smile, image, 200 OK", function (assert) {
+        var done = assert.async();
+        JSZipUtils.getBinaryContent("ref/smile.gif").then(function (data) {
+            assert.equal(toString(data).indexOf("\x47\x49\x46\x38\x37\x61"), 0, "The content has been fetched");
+            done();
+        }).catch(function (err) {
+            assert.equal(err, null, "no error");
+            done();
+        });
+    });
+
+    QUnit.test("JSZipUtils.getBinaryContent nothing, 404 NOT FOUND", function (assert) {
+        var done = assert.async();
+        JSZipUtils.getBinaryContent("ref/nothing").then(function (data) {
+            assert.equal(data, null, "no error");
+            done();
+        }).catch(function (err) {
+            assert.ok(err instanceof Error, "The error is an Error");
+            done();
+        });
+    });
+
+    QUnit.module("Promise, options={}");
+
+    QUnit.test("JSZipUtils.getBinaryContent amount, text, 200 OK", function (assert) {
+        var done = assert.async();
+        JSZipUtils.getBinaryContent("ref/amount.txt", {}).then(function (data) {
+                assert.equal(toString(data), "\xe2\x82\xac\x31\x35\x0a", "The content has been fetched");
+                done();
+            })
+            .catch(function (err) {
+                assert.equal(err, null, "no error");
+                done();
+            });
+    });
+
+    QUnit.test("JSZipUtils.getBinaryContent smile, image, 200 OK", function (assert) {
+        var done = assert.async();
+        JSZipUtils.getBinaryContent("ref/smile.gif", {}).then(function (data) {
+            assert.equal(toString(data).indexOf("\x47\x49\x46\x38\x37\x61"), 0, "The content has been fetched");
+            done();
+        }).catch(function (err) {
+            assert.equal(err, null, "no error");
+            done();
+        });
+    });
+
+    QUnit.test("JSZipUtils.getBinaryContent nothing, 404 NOT FOUND", function (assert) {
+        var done = assert.async();
+        JSZipUtils.getBinaryContent("ref/nothing", {}).then(function (data) {
+            assert.equal(data, null, "no error");
+            done();
+        }).catch(function (err) {
+            assert.ok(err instanceof Error, "The error is an Error");
+            done();
+        });
+    });
+
+    QUnit.module("Promise, options={progress}");
+
+    QUnit.test("JSZipUtils.getBinaryContent amount, text, 200 OK", function (assert) {
+        var done = assert.async();
+        var progress = assert.async();
+        JSZipUtils.getBinaryContent("ref/amount.txt", { progress: function(e){
+            assert.ok(true, 'progress to be called');
+            assert.strictEqual(e.total, 6, 'total');
+            progress();
+        }}).then(function (data) {
+                assert.equal(toString(data), "\xe2\x82\xac\x31\x35\x0a", "The content has been fetched");
+                done();
+            })
+            .catch(function (err) {
+                assert.equal(err, null, "no error");
+                done();
+            });
+    });
+
+    QUnit.test("JSZipUtils.getBinaryContent smile, image, 200 OK", function (assert) {
+        var done = assert.async();
+        var progress = assert.async();
+        JSZipUtils.getBinaryContent("ref/smile.gif", { progress: function(e){
+            assert.ok(true, 'progress to be called');
+            assert.strictEqual(e.total, 41, 'total');
+            progress();
+        }}).then(function (data) {
+            assert.equal(toString(data).indexOf("\x47\x49\x46\x38\x37\x61"), 0, "The content has been fetched");
+            done();
+        }).catch(function (err) {
+            assert.equal(err, null, "no error");
+            done();
+        });
+    });
+
+    QUnit.test("JSZipUtils.getBinaryContent nothing, 404 NOT FOUND", function (assert) {
+        var done = assert.async();
+
+        JSZipUtils.getBinaryContent("ref/nothing", { progress: function(e){
+
+        }}).then(function (data) {
+            assert.equal(data, null, "no error");
+            done();
+        }).catch(function (err) {
+            assert.ok(err instanceof Error, "The error is an Error");
+            done();
+        });
+    });
+} // Promise tests
+
+// enforcing Stuk's coding style
+// vim: set shiftwidth=4 softtabstop=4:

+ 204 - 0
libs/jszip/CHANGES.md

@@ -0,0 +1,204 @@
+---
+title: Changelog
+layout: default
+section: main
+---
+
+### v3.10.1 2022-08-02
+
+- Add sponsorship files.
+    + If you appreciate the time spent maintaining JSZip then I would really appreciate [your sponsorship](https://github.com/sponsors/Stuk).
+- Consolidate metadata types and expose OnUpdateCallback [#851](https://github.com/Stuk/jszip/pull/851) and [#852](https://github.com/Stuk/jszip/pull/852)
+- Use `const` instead of `var` in the example from README.markdown [#828](https://github.com/Stuk/jszip/pull/828)
+- Switch manual download link to HTTPS [#839](https://github.com/Stuk/jszip/pull/839)
+
+Internals:
+
+- Replace jshint with eslint [#842](https://github.com/Stuk/jszip/pull/842)
+- Add performance tests [#834](https://github.com/Stuk/jszip/pull/834)
+
+### v3.10.0 2022-05-20
+
+- Change setimmediate dependency to more efficient one. Fixes https://github.com/Stuk/jszip/issues/617 (see [#829](https://github.com/Stuk/jszip/pull/829))
+- Update types of `currentFile` metadata to include `null` (see [#826](https://github.com/Stuk/jszip/pull/826))
+
+### v3.9.1 2022-04-06
+
+- Fix recursive definition of `InputFileFormat` introduced in 3.9.0.
+
+### v3.9.0 2022-04-04
+
+- Update types JSZip#loadAsync to accept a promise for data, and remove arguments from `new JSZip()` (see [#752](https://github.com/Stuk/jszip/pull/752))
+- Update types for `compressionOptions` to JSZipFileOptions and JSZipGeneratorOptions (see [#722](https://github.com/Stuk/jszip/pull/722))
+- Add types for `generateInternalStream` (see [#774](https://github.com/Stuk/jszip/pull/774))
+
+### v3.8.0 2022-03-30
+
+- Sanitize filenames when files are loaded with `loadAsync`, to avoid ["zip slip" attacks](https://snyk.io/research/zip-slip-vulnerability). The original filename is available on each zip entry as `unsafeOriginalName`. See the [documentation](https://stuk.github.io/jszip/documentation/api_jszip/load_async.html). Many thanks to McCaulay Hudson for reporting.
+
+### v3.7.1 2021-08-05
+
+- Fix build of `dist` files.
+    + Note: this version ensures the changes from 3.7.0 are actually included in the `dist` files. Thanks to Evan W for reporting.
+
+### v3.7.0 2021-07-23
+
+- Fix: Use a null prototype object for this.files  (see [#766](https://github.com/Stuk/jszip/pull/766))
+    + This change might break existing code if it uses prototype methods on the `.files` property of a zip object, for example `zip.files.toString()`. This approach is taken to prevent files in the zip overriding object methods that would exist on a normal object.
+
+### v3.6.0 2021-02-09
+
+- Fix: redirect main to dist on browsers (see [#742](https://github.com/Stuk/jszip/pull/742))
+- Fix duplicate require DataLengthProbe, utils (see [#734](https://github.com/Stuk/jszip/pull/734))
+- Fix small error in read_zip.md (see [#703](https://github.com/Stuk/jszip/pull/703))
+
+### v3.5.0 2020-05-31
+
+- Fix 'End of data reached' error when file extra field is invalid (see [#544](https://github.com/Stuk/jszip/pull/544)).
+- Typescript definitions: Add null to return types of functions that may return null (see [#669](https://github.com/Stuk/jszip/pull/669)).
+- Typescript definitions: Correct nodeStream's type (see [#682](https://github.com/Stuk/jszip/pull/682))
+- Typescript definitions: Add string output type (see [#666](https://github.com/Stuk/jszip/pull/666))
+
+### v3.4.0 2020-04-19
+
+- Add Typescript type definitions (see [#601](https://github.com/Stuk/jszip/pull/601)).
+
+### v3.3.0 2020-04-01
+
+- Change browser module resolution to support Angular packager (see [#614](https://github.com/Stuk/jszip/pull/614)).
+
+### v3.2.2 2019-07-04
+- No public changes, but a number of testing dependencies have been updated.
+- Tested browsers are now: Internet Explorer 11, Chrome (most recent) and Firefox (most recent). Other browsers (specifically Safari) are still supported; however, testing them on Saucelabs is broken, so they were removed from the test matrix.
+
+### v3.2.1 2019-03-22
+- Corrected built dist files
+
+### v3.2.0 2019-02-21
+- Update dependencies to reduce bundle size (see [#532](https://github.com/Stuk/jszip/pull/532)).
+- Fix deprecated Buffer constructor usage and add safeguards (see [#506](https://github.com/Stuk/jszip/pull/506)).
+
+### v3.1.5 2017-11-09
+- Fix IE11 memory leak (see [#429](https://github.com/Stuk/jszip/pull/429)).
+- Handle 2 nodejs deprecations (see [#459](https://github.com/Stuk/jszip/pull/459)).
+- Improve the "unsupported format" error message (see [#461](https://github.com/Stuk/jszip/pull/461)).
+- Improve webworker compatibility (see [#468](https://github.com/Stuk/jszip/pull/468)).
+- Fix nodejs 0.10 compatibility (see [#480](https://github.com/Stuk/jszip/pull/480)).
+- Improve the error without type in async() (see [#481](https://github.com/Stuk/jszip/pull/481)).
+
+### v3.1.4 2017-08-24
+- consistently use our own utils object for inheritance (see [#395](https://github.com/Stuk/jszip/pull/395)).
+- lower the memory consumption in `generate*` with a lot of files (see [#449](https://github.com/Stuk/jszip/pull/449)).
+
+### v3.1.3 2016-10-06
+- instanceof failing in window / iframe contexts (see [#350](https://github.com/Stuk/jszip/pull/350)).
+- remove a copy with blob output (see [#357](https://github.com/Stuk/jszip/pull/357)).
+- fix crc32 check for empty entries (see [#358](https://github.com/Stuk/jszip/pull/358)).
+- fix the base64 error message with data uri (see [#359](https://github.com/Stuk/jszip/pull/359)).
+
+### v3.1.2 2016-08-23
+- fix support of nodejs `process.platform` in `generate*` methods (see [#335](https://github.com/Stuk/jszip/pull/335)).
+- improve browserify/webpack support (see [#333](https://github.com/Stuk/jszip/pull/333)).
+- partial support of a promise of text (see [#337](https://github.com/Stuk/jszip/pull/337)).
+- fix streamed zip files containing folders (see [#342](https://github.com/Stuk/jszip/pull/342)).
+
+### v3.1.1 2016-08-08
+- Use a hard-coded JSZip.version, fix an issue with webpack (see [#328](https://github.com/Stuk/jszip/pull/328)).
+
+### v3.1.0 2016-08-03
+- utils.delay: use macro tasks instead of micro tasks (see [#288](https://github.com/Stuk/jszip/pull/288)).
+- Harden base64 decode (see [#316](https://github.com/Stuk/jszip/pull/316)).
+- Add JSZip.version and the version in the header (see [#317](https://github.com/Stuk/jszip/pull/317)).
+- Support Promise(Blob) (see [#318](https://github.com/Stuk/jszip/pull/318)).
+- Change JSZip.external.Promise implementation (see [#321](https://github.com/Stuk/jszip/pull/321)).
+- Update pako to v1.0.2 to fix a DEFLATE bug (see [#322](https://github.com/Stuk/jszip/pull/322)).
+
+### v3.0.0 2016-04-13
+This release changes a lot of methods, please see [the upgrade guide](http://stuk.github.io/jszip/documentation/upgrade_guide.html).
+
+- replace sync getters and `generate()` with async methods (see [#195](https://github.com/Stuk/jszip/pull/195)).
+- support nodejs streams (in `file()` and `generateAsync()`).
+- support Blob and Promise in `file()` and `loadAsync()` (see [#275](https://github.com/Stuk/jszip/pull/275)).
+- add `support.nodestream`.
+- zip.filter: remove the defensive copy.
+- remove the deprecated API (see [#253](https://github.com/Stuk/jszip/pull/253)).
+- `type` is now mandatory in `generateAsync()`.
+- change the createFolders default value (now `true`).
+- Dates: use UTC instead of the local timezone.
+- Add `base64` and `array` as possible output type.
+- Add a forEach method.
+- Drop node 0.8 support (see [#270](https://github.com/Stuk/jszip/pull/270)).
+
+### v2.6.1 2016-07-28
+- update pako to v1.0.2 to fix a DEFLATE bug (see [#322](https://github.com/Stuk/jszip/pull/322)).
+
+### v2.6.0 2016-03-23
+- publish `dist/` files in the npm package (see [#225](https://github.com/Stuk/jszip/pull/225)).
+- update pako to v1.0.0 (see [#261](https://github.com/Stuk/jszip/pull/261)).
+- add support of Array in JSZip#load (see [#252](https://github.com/Stuk/jszip/pull/252)).
+- improve file name / comment encoding support (see [#211](https://github.com/Stuk/jszip/pull/211)).
+- handle prepended data (see [#266](https://github.com/Stuk/jszip/pull/266)).
+- improve platform coverage in tests (see [#233](https://github.com/Stuk/jszip/pull/233) and [#269](https://github.com/Stuk/jszip/pull/269)).
+
+### v2.5.0 2015-03-10
+- add support for custom mime-types (see [#199](https://github.com/Stuk/jszip/issues/199)).
+- add an option to set the DEFLATE level (see [#201](https://github.com/Stuk/jszip/issues/201)).
+- improve the error message with corrupted zip (see [#202](https://github.com/Stuk/jszip/issues/202)).
+- add support for UNIX / DOS permissions (see [#200](https://github.com/Stuk/jszip/issues/200) and [#205](https://github.com/Stuk/jszip/issues/205)).
+
+### v2.4.0 2014-07-24
+- update pako to 0.2.5 (see [#156](https://github.com/Stuk/jszip/issues/156)).
+- make JSZip work in a Firefox addon context (see [#151](https://github.com/Stuk/jszip/issues/151)).
+- add an option (`createFolders`) to control the subfolder generation (see [#154](https://github.com/Stuk/jszip/issues/154)).
+- allow `Buffer` polyfill in the browser (see [#139](https://github.com/Stuk/jszip/issues/139)).
+
+### v2.3.0 2014-06-18
+- don't generate subfolders (see [#130](https://github.com/Stuk/jszip/issues/130)).
+- add comment support (see [#134](https://github.com/Stuk/jszip/issues/134)).
+- on `ZipObject#options`, the attributes `date` and `dir` have been deprecated and are now on `ZipObject` (see [the upgrade guide](http://stuk.github.io/jszip/documentation/upgrade_guide.html)).
+- on `ZipObject#options`, the attributes `base64` and `binary` have been deprecated (see [the upgrade guide](http://stuk.github.io/jszip/documentation/upgrade_guide.html)).
+- deprecate internal functions exposed in the public API (see [#123](https://github.com/Stuk/jszip/issues/123)).
+- improve UTF-8 support (see [#142](https://github.com/Stuk/jszip/issues/142)).
+
+### v2.2.2, 2014-05-01
+ - update pako to v0.2.1, fix an error when decompressing some files (see [#126](https://github.com/Stuk/jszip/issues/126)).
+
+### v2.2.1, 2014-04-23
+ - fix unreadable generated file on Windows 8 (see [#112](https://github.com/Stuk/jszip/issues/112)).
+ - replace zlibjs with pako.
+
+### v2.2.0, 2014-02-25
+ - make the `new` operator optional before the `JSZip` constructor (see [#93](https://github.com/Stuk/jszip/pull/93)).
+ - update zlibjs to v0.2.0.
+
+### v2.1.1, 2014-02-13
+ - use the npm package for zlib.js instead of the github url.
+
+### v2.1.0, 2014-02-06
+ - split the files and use Browserify to generate the final file (see [#74](https://github.com/Stuk/jszip/pull/74))
+ - packaging change : instead of 4 files (jszip.js, jszip-load.js, jszip-inflate.js, jszip-deflate.js) we now have 2 files : dist/jszip.js and dist/jszip.min.js
+ - add component/bower support
+ - rename variable: 'byte' is a reserved word (see [#76](https://github.com/Stuk/jszip/pull/76))
+ - add support for the unicode path extra field (see [#82](https://github.com/Stuk/jszip/pull/82))
+ - ensure that the generated files have a header with the licenses (see [#80](https://github.com/Stuk/jszip/pull/80))
+
+# v2.0.0, 2013-10-20
+
+ - `JSZipBase64` has been renamed to `JSZip.base64`.
+ - The `data` attribute on the object returned by `zip.file(name)` has been removed. Use `asText()`, `asBinary()`, `asUint8Array()`, `asArrayBuffer()` or `asNodeBuffer()`.
+
+ - [Fix issue with Android browser](https://github.com/Stuk/jszip/pull/60)
+
+ - The compression/decompression methods now give their input type with the `compressInputType` and `uncompressInputType` attributes.
+ - Lazily decompress data when needed and [improve performance in general](https://github.com/Stuk/jszip/pull/56)
+ - [Add support for `Buffer` in Node.js](https://github.com/Stuk/jszip/pull/57).
+ - Package for CommonJS/npm.
+
+### v1.0.1, 2013-03-04
+
+ - Fixed an issue when generating a compressed zip file with empty files or folders, see #33.
+ - With bad data (null or undefined), asText/asBinary/asUint8Array/asArrayBuffer methods now return an empty string, see #36.
+
+# v1.0.0, 2013-02-14
+
+- First release after a long period without version.

+ 651 - 0
libs/jszip/LICENSE.markdown

@@ -0,0 +1,651 @@
+JSZip is dual licensed. At your choice you may use it under the MIT license *or* the GPLv3
+license.
+
+The MIT License
+===============
+
+Copyright (c) 2009-2016 Stuart Knightley, David Duponchel, Franz Buchinger, António Afonso
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+
+GPL version 3
+=============
+
+                    GNU GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+                            Preamble
+
+  The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+  The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works.  By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.  We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors.  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+  To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights.  Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+  For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received.  You must make sure that they, too, receive
+or can get the source code.  And you must show them these terms so they
+know their rights.
+
+  Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+  For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software.  For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+  Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so.  This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software.  The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable.  Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products.  If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+  Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary.  To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+                       TERMS AND CONDITIONS
+
+  0. Definitions.
+
+  "This License" refers to version 3 of the GNU General Public License.
+
+  "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+  "The Program" refers to any copyrightable work licensed under this
+License.  Each licensee is addressed as "you".  "Licensees" and
+"recipients" may be individuals or organizations.
+
+  To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy.  The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+  A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+  To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy.  Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+  To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies.  Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+  An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License.  If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+  1. Source Code.
+
+  The "source code" for a work means the preferred form of the work
+for making modifications to it.  "Object code" means any non-source
+form of a work.
+
+  A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+  The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form.  A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+  The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities.  However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work.  For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+  The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+  The Corresponding Source for a work in source code form is that
+same work.
+
+  2. Basic Permissions.
+
+  All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met.  This License explicitly affirms your unlimited
+permission to run the unmodified Program.  The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work.  This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+  You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force.  You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright.  Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+  Conveying under any other circumstances is permitted solely under
+the conditions stated below.  Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+  3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+  No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+  When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+  4. Conveying Verbatim Copies.
+
+  You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+  You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+  5. Conveying Modified Source Versions.
+
+  You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+    a) The work must carry prominent notices stating that you modified
+    it, and giving a relevant date.
+
+    b) The work must carry prominent notices stating that it is
+    released under this License and any conditions added under section
+    7.  This requirement modifies the requirement in section 4 to
+    "keep intact all notices".
+
+    c) You must license the entire work, as a whole, under this
+    License to anyone who comes into possession of a copy.  This
+    License will therefore apply, along with any applicable section 7
+    additional terms, to the whole of the work, and all its parts,
+    regardless of how they are packaged.  This License gives no
+    permission to license the work in any other way, but it does not
+    invalidate such permission if you have separately received it.
+
+    d) If the work has interactive user interfaces, each must display
+    Appropriate Legal Notices; however, if the Program has interactive
+    interfaces that do not display Appropriate Legal Notices, your
+    work need not make them do so.
+
+  A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit.  Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+  6. Conveying Non-Source Forms.
+
+  You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+    a) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by the
+    Corresponding Source fixed on a durable physical medium
+    customarily used for software interchange.
+
+    b) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by a
+    written offer, valid for at least three years and valid for as
+    long as you offer spare parts or customer support for that product
+    model, to give anyone who possesses the object code either (1) a
+    copy of the Corresponding Source for all the software in the
+    product that is covered by this License, on a durable physical
+    medium customarily used for software interchange, for a price no
+    more than your reasonable cost of physically performing this
+    conveying of source, or (2) access to copy the
+    Corresponding Source from a network server at no charge.
+
+    c) Convey individual copies of the object code with a copy of the
+    written offer to provide the Corresponding Source.  This
+    alternative is allowed only occasionally and noncommercially, and
+    only if you received the object code with such an offer, in accord
+    with subsection 6b.
+
+    d) Convey the object code by offering access from a designated
+    place (gratis or for a charge), and offer equivalent access to the
+    Corresponding Source in the same way through the same place at no
+    further charge.  You need not require recipients to copy the
+    Corresponding Source along with the object code.  If the place to
+    copy the object code is a network server, the Corresponding Source
+    may be on a different server (operated by you or a third party)
+    that supports equivalent copying facilities, provided you maintain
+    clear directions next to the object code saying where to find the
+    Corresponding Source.  Regardless of what server hosts the
+    Corresponding Source, you remain obligated to ensure that it is
+    available for as long as needed to satisfy these requirements.
+
+    e) Convey the object code using peer-to-peer transmission, provided
+    you inform other peers where the object code and Corresponding
+    Source of the work are being offered to the general public at no
+    charge under subsection 6d.
+
+  A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+  A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling.  In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage.  For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product.  A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+  "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source.  The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+  If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information.  But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+  The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed.  Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+  Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+  7. Additional Terms.
+
+  "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law.  If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+  When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it.  (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.)  You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+  Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+    a) Disclaiming warranty or limiting liability differently from the
+    terms of sections 15 and 16 of this License; or
+
+    b) Requiring preservation of specified reasonable legal notices or
+    author attributions in that material or in the Appropriate Legal
+    Notices displayed by works containing it; or
+
+    c) Prohibiting misrepresentation of the origin of that material, or
+    requiring that modified versions of such material be marked in
+    reasonable ways as different from the original version; or
+
+    d) Limiting the use for publicity purposes of names of licensors or
+    authors of the material; or
+
+    e) Declining to grant rights under trademark law for use of some
+    trade names, trademarks, or service marks; or
+
+    f) Requiring indemnification of licensors and authors of that
+    material by anyone who conveys the material (or modified versions of
+    it) with contractual assumptions of liability to the recipient, for
+    any liability that these contractual assumptions directly impose on
+    those licensors and authors.
+
+  All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10.  If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term.  If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+  If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+  Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+  8. Termination.
+
+  You may not propagate or modify a covered work except as expressly
+provided under this License.  Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+  However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+  Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+  Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License.  If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+  9. Acceptance Not Required for Having Copies.
+
+  You are not required to accept this License in order to receive or
+run a copy of the Program.  Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance.  However,
+nothing other than this License grants you permission to propagate or
+modify any covered work.  These actions infringe copyright if you do
+not accept this License.  Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+  10. Automatic Licensing of Downstream Recipients.
+
+  Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License.  You are not responsible
+for enforcing compliance by third parties with this License.
+
+  An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations.  If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+  You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License.  For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+  11. Patents.
+
+  A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based.  The
+work thus licensed is called the contributor's "contributor version".
+
+  A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version.  For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+  Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+  In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement).  To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+  If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients.  "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+  If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+  A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License.  You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+  Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+  12. No Surrender of Others' Freedom.
+
+  If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all.  For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+  13. Use with the GNU Affero General Public License.
+
+  Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work.  The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+  14. Revised Versions of this License.
+
+  The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time.  Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+  Each version is given a distinguishing version number.  If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation.  If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+  If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+  Later license versions may give you additional or different
+permissions.  However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+  15. Disclaimer of Warranty.
+
+  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+  16. Limitation of Liability.
+
+  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+  17. Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS

+ 33 - 0
libs/jszip/README.markdown

@@ -0,0 +1,33 @@
+JSZip
+=====
+
+A library for creating, reading and editing .zip files with JavaScript, with a
+lovely and simple API.
+
+See https://stuk.github.io/jszip for all the documentation.
+
+```javascript
+const zip = new JSZip();
+
+zip.file("Hello.txt", "Hello World\n");
+
+const img = zip.folder("images");
+img.file("smile.gif", imgData, {base64: true});
+
+zip.generateAsync({type:"blob"}).then(function(content) {
+    // see FileSaver.js
+    saveAs(content, "example.zip");
+});
+
+/*
+Results in a zip containing
+Hello.txt
+images/
+    smile.gif
+*/
+```
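+
+Reading an existing archive works the other way around. A minimal sketch,
+assuming `data` already holds the archive produced above as an ArrayBuffer,
+Uint8Array, Blob or Node.js Buffer:
+
+```javascript
+JSZip.loadAsync(data).then(function (zip) {
+    // list every entry in the archive
+    zip.forEach(function (relativePath, entry) {
+        console.log(relativePath, entry.dir ? "(dir)" : "");
+    });
+    // then pull a single file back out as text
+    return zip.file("Hello.txt").async("string");
+}).then(function (text) {
+    console.log(text); // "Hello World\n"
+});
+```
+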
+License
+-------
+
+JSZip is dual-licensed. You may use it under the MIT license *or* the GPLv3
+license. See [LICENSE.markdown](LICENSE.markdown).

+ 37 - 0
libs/jszip/deps.js

@@ -0,0 +1,37 @@
+"use strict";
+
+const madge = require("madge");
+const path = require("path");
+const ts = require("typescript");
+
+const DIR = "./lib";
+
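+// Finds which modules under ./lib still need converting to TypeScript: builds
+// the dependency graph with madge, skips files the tsconfig already covers,
+// and prints the untyped leaves (or, failing that, untyped modules involved in
+// circular dependencies). A rendering of the graph is also written to graph.svg.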
+async function main() {
+    const tsconfigPath = ts.findConfigFile("./", ts.sys.fileExists);
+    const tsconfig = ts.readConfigFile(tsconfigPath, ts.sys.readFile).config;
+    const parsedConfig = ts.parseJsonConfigFileContent(tsconfig, ts.sys, "./");
+    const typedPaths = parsedConfig.fileNames.map(filename => path.resolve("./", filename));
+
+    const excludeTypedPaths = path => !typedPaths.includes(path);
+
+    const res = await madge(DIR, {
+        dependencyFilter: excludeTypedPaths
+    });
+
+    const untypedLeaves = res.leaves().map(filename => path.resolve(DIR, filename)).filter(excludeTypedPaths);
+    if (untypedLeaves.length) {
+        console.log("Convert next:");
+        console.log(untypedLeaves.join("\n"));
+    } else {
+        console.log("No untyped leaf dependencies found.");
+        console.log("Try looking at circular dependencies, or the image to decide what to convert next:");
+        const untypedCircular = res.circular().flat().map(filename => path.resolve(DIR, filename)).filter(excludeTypedPaths);
+        console.log(untypedCircular.join("\n"));
+    }
+
+    const imagePath = await res.image("graph.svg");
+    console.log();
+    console.log("Image written to " + imagePath);
+}
+
+main();

The file diff is not shown because of its large size
+ 11577 - 0
libs/jszip/dist/jszip.js


The file diff is not shown because of its large size
+ 13 - 0
libs/jszip/dist/jszip.min.js


+ 601 - 0
libs/jszip/graph.svg

@@ -0,0 +1,601 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
+ "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<!-- Generated by graphviz version 5.0.0 (20220707.1540)
+ -->
+<!-- Title: G Pages: 1 -->
+<svg width="1779pt" height="983pt"
+ viewBox="0.00 0.00 1779.20 982.66" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(21.6 961.06)">
+<title>G</title>
+<polygon fill="#111111" stroke="transparent" points="-21.6,21.6 -21.6,-961.06 1757.6,-961.06 1757.6,21.6 -21.6,21.6"/>
+<!-- base64.js -->
+<g id="node1" class="node">
+<title>base64.js</title>
+<path fill="none" stroke="#cfffac" d="M119.83,-306C119.83,-306 61.17,-306 61.17,-306 57.33,-306 53.5,-302.17 53.5,-298.33 53.5,-298.33 53.5,-290.67 53.5,-290.67 53.5,-286.83 57.33,-283 61.17,-283 61.17,-283 119.83,-283 119.83,-283 123.67,-283 127.5,-286.83 127.5,-290.67 127.5,-290.67 127.5,-298.33 127.5,-298.33 127.5,-302.17 123.67,-306 119.83,-306"/>
+<text text-anchor="middle" x="90.5" y="-290.8" font-family="Arial" font-size="14.00" fill="#cfffac">base64.js</text>
+</g>
+<!-- compressedObject.js -->
+<g id="node2" class="node">
+<title>compressedObject.js</title>
+<path fill="none" stroke="#c6c5fe" d="M1141.33,-296C1141.33,-296 1012.67,-296 1012.67,-296 1008.83,-296 1005,-292.17 1005,-288.33 1005,-288.33 1005,-280.67 1005,-280.67 1005,-276.83 1008.83,-273 1012.67,-273 1012.67,-273 1141.33,-273 1141.33,-273 1145.17,-273 1149,-276.83 1149,-280.67 1149,-280.67 1149,-288.33 1149,-288.33 1149,-292.17 1145.17,-296 1141.33,-296"/>
+<text text-anchor="middle" x="1077" y="-280.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">compressedObject.js</text>
+</g>
+<!-- stream/Crc32Probe.js -->
+<g id="node3" class="node">
+<title>stream/Crc32Probe.js</title>
+<path fill="none" stroke="#c6c5fe" d="M1348.83,-415C1348.83,-415 1216.17,-415 1216.17,-415 1212.33,-415 1208.5,-411.17 1208.5,-407.33 1208.5,-407.33 1208.5,-399.67 1208.5,-399.67 1208.5,-395.83 1212.33,-392 1216.17,-392 1216.17,-392 1348.83,-392 1348.83,-392 1352.67,-392 1356.5,-395.83 1356.5,-399.67 1356.5,-399.67 1356.5,-407.33 1356.5,-407.33 1356.5,-411.17 1352.67,-415 1348.83,-415"/>
+<text text-anchor="middle" x="1282.5" y="-399.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">stream/Crc32Probe.js</text>
+</g>
+<!-- compressedObject.js&#45;&gt;stream/Crc32Probe.js -->
+<g id="edge1" class="edge">
+<title>compressedObject.js&#45;&gt;stream/Crc32Probe.js</title>
+<path fill="none" stroke="#757575" d="M1089.24,-296.19C1109.84,-316.93 1155.86,-360.17 1203,-383.5 1206.76,-385.36 1210.7,-387.06 1214.75,-388.61"/>
+<polygon fill="#757575" stroke="#757575" points="1213.71,-391.95 1224.3,-391.95 1216.02,-385.35 1213.71,-391.95"/>
+</g>
+<!-- stream/DataLengthProbe.js -->
+<g id="node4" class="node">
+<title>stream/DataLengthProbe.js</title>
+<path fill="none" stroke="#c6c5fe" d="M1605.33,-337C1605.33,-337 1438.67,-337 1438.67,-337 1434.83,-337 1431,-333.17 1431,-329.33 1431,-329.33 1431,-321.67 1431,-321.67 1431,-317.83 1434.83,-314 1438.67,-314 1438.67,-314 1605.33,-314 1605.33,-314 1609.17,-314 1613,-317.83 1613,-321.67 1613,-321.67 1613,-329.33 1613,-329.33 1613,-333.17 1609.17,-337 1605.33,-337"/>
+<text text-anchor="middle" x="1522" y="-321.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">stream/DataLengthProbe.js</text>
+</g>
+<!-- compressedObject.js&#45;&gt;stream/DataLengthProbe.js -->
+<g id="edge2" class="edge">
+<title>compressedObject.js&#45;&gt;stream/DataLengthProbe.js</title>
+<path fill="none" stroke="#757575" d="M1146.96,-296.01C1165.15,-298.77 1184.78,-301.49 1203,-303.5 1275.56,-311.51 1357.77,-317.11 1420.42,-320.65"/>
+<polygon fill="#757575" stroke="#757575" points="1420.52,-324.16 1430.7,-321.22 1420.91,-317.17 1420.52,-324.16"/>
+</g>
+<!-- stream/DataWorker.js -->
+<g id="node5" class="node">
+<title>stream/DataWorker.js</title>
+<path fill="none" stroke="#c6c5fe" d="M1348.33,-294C1348.33,-294 1216.67,-294 1216.67,-294 1212.83,-294 1209,-290.17 1209,-286.33 1209,-286.33 1209,-278.67 1209,-278.67 1209,-274.83 1212.83,-271 1216.67,-271 1216.67,-271 1348.33,-271 1348.33,-271 1352.17,-271 1356,-274.83 1356,-278.67 1356,-278.67 1356,-286.33 1356,-286.33 1356,-290.17 1352.17,-294 1348.33,-294"/>
+<text text-anchor="middle" x="1282.5" y="-278.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">stream/DataWorker.js</text>
+</g>
+<!-- compressedObject.js&#45;&gt;stream/DataWorker.js -->
+<g id="edge3" class="edge">
+<title>compressedObject.js&#45;&gt;stream/DataWorker.js</title>
+<path fill="none" stroke="#757575" d="M1149.17,-283.8C1165.22,-283.64 1182.37,-283.47 1198.84,-283.31"/>
+<polygon fill="#757575" stroke="#757575" points="1199.02,-286.81 1208.99,-283.21 1198.95,-279.81 1199.02,-286.81"/>
+</g>
+<!-- crc32.js -->
+<g id="node8" class="node">
+<title>crc32.js</title>
+<path fill="none" stroke="#c6c5fe" d="M1545.83,-495C1545.83,-495 1498.17,-495 1498.17,-495 1494.33,-495 1490.5,-491.17 1490.5,-487.33 1490.5,-487.33 1490.5,-479.67 1490.5,-479.67 1490.5,-475.83 1494.33,-472 1498.17,-472 1498.17,-472 1545.83,-472 1545.83,-472 1549.67,-472 1553.5,-475.83 1553.5,-479.67 1553.5,-479.67 1553.5,-487.33 1553.5,-487.33 1553.5,-491.17 1549.67,-495 1545.83,-495"/>
+<text text-anchor="middle" x="1522" y="-479.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">crc32.js</text>
+</g>
+<!-- stream/Crc32Probe.js&#45;&gt;crc32.js -->
+<g id="edge42" class="edge">
+<title>stream/Crc32Probe.js&#45;&gt;crc32.js</title>
+<path fill="none" stroke="#757575" d="M1317.78,-415.05C1360.98,-429.6 1435.12,-454.57 1480.9,-469.99"/>
+<polygon fill="#757575" stroke="#757575" points="1479.91,-473.35 1490.5,-473.23 1482.14,-466.72 1479.91,-473.35"/>
+</g>
+<!-- utils.js -->
+<g id="node9" class="node">
+<title>utils.js</title>
+<path fill="none" stroke="#cfffac" d="M1728.33,-495C1728.33,-495 1689.67,-495 1689.67,-495 1685.83,-495 1682,-491.17 1682,-487.33 1682,-487.33 1682,-479.67 1682,-479.67 1682,-475.83 1685.83,-472 1689.67,-472 1689.67,-472 1728.33,-472 1728.33,-472 1732.17,-472 1736,-475.83 1736,-479.67 1736,-479.67 1736,-487.33 1736,-487.33 1736,-491.17 1732.17,-495 1728.33,-495"/>
+<text text-anchor="middle" x="1709" y="-479.8" font-family="Arial" font-size="14.00" fill="#cfffac">utils.js</text>
+</g>
+<!-- stream/Crc32Probe.js&#45;&gt;utils.js -->
+<g id="edge43" class="edge">
+<title>stream/Crc32Probe.js&#45;&gt;utils.js</title>
+<path fill="none" stroke="#757575" d="M1356.76,-395.52C1432.27,-390.27 1551.91,-390.38 1646,-429.5 1663.95,-436.96 1680.34,-451.77 1691.76,-463.99"/>
+<polygon fill="#757575" stroke="#757575" points="1689.24,-466.42 1698.51,-471.54 1694.46,-461.75 1689.24,-466.42"/>
+</g>
+<!-- stream/DataLengthProbe.js&#45;&gt;utils.js -->
+<g id="edge44" class="edge">
+<title>stream/DataLengthProbe.js&#45;&gt;utils.js</title>
+<path fill="none" stroke="#757575" d="M1613.06,-335.12C1624.95,-339.19 1636.34,-344.81 1646,-352.5 1680.91,-380.29 1697.21,-432.45 1703.92,-461.87"/>
+<polygon fill="#757575" stroke="#757575" points="1700.56,-462.88 1706.05,-471.94 1707.41,-461.43 1700.56,-462.88"/>
+</g>
+<!-- stream/DataWorker.js&#45;&gt;utils.js -->
+<g id="edge45" class="edge">
+<title>stream/DataWorker.js&#45;&gt;utils.js</title>
+<path fill="none" stroke="#757575" d="M1328.44,-270.94C1402.75,-254.67 1552.34,-234.37 1646,-305.5 1695.57,-343.15 1705.71,-423.13 1707.64,-461.74"/>
+<polygon fill="#757575" stroke="#757575" points="1704.15,-462.02 1708.01,-471.89 1711.15,-461.77 1704.15,-462.02"/>
+</g>
+<!-- compressions.js -->
+<g id="node6" class="node">
+<title>compressions.js</title>
+<path fill="none" stroke="#c6c5fe" d="M1126.33,-608C1126.33,-608 1027.67,-608 1027.67,-608 1023.83,-608 1020,-604.17 1020,-600.33 1020,-600.33 1020,-592.67 1020,-592.67 1020,-588.83 1023.83,-585 1027.67,-585 1027.67,-585 1126.33,-585 1126.33,-585 1130.17,-585 1134,-588.83 1134,-592.67 1134,-592.67 1134,-600.33 1134,-600.33 1134,-604.17 1130.17,-608 1126.33,-608"/>
+<text text-anchor="middle" x="1077" y="-592.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">compressions.js</text>
+</g>
+<!-- flate.js -->
+<g id="node7" class="node">
+<title>flate.js</title>
+<path fill="none" stroke="#c6c5fe" d="M1542.33,-608C1542.33,-608 1501.67,-608 1501.67,-608 1497.83,-608 1494,-604.17 1494,-600.33 1494,-600.33 1494,-592.67 1494,-592.67 1494,-588.83 1497.83,-585 1501.67,-585 1501.67,-585 1542.33,-585 1542.33,-585 1546.17,-585 1550,-588.83 1550,-592.67 1550,-592.67 1550,-600.33 1550,-600.33 1550,-604.17 1546.17,-608 1542.33,-608"/>
+<text text-anchor="middle" x="1522" y="-592.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">flate.js</text>
+</g>
+<!-- compressions.js&#45;&gt;flate.js -->
+<g id="edge4" class="edge">
+<title>compressions.js&#45;&gt;flate.js</title>
+<path fill="none" stroke="#757575" d="M1134.11,-596.5C1225.94,-596.5 1405.91,-596.5 1483.7,-596.5"/>
+<polygon fill="#757575" stroke="#757575" points="1483.98,-600 1493.98,-596.5 1483.98,-593 1483.98,-600"/>
+</g>
+<!-- flate.js&#45;&gt;utils.js -->
+<g id="edge6" class="edge">
+<title>flate.js&#45;&gt;utils.js</title>
+<path fill="none" stroke="#757575" d="M1550.09,-596.32C1576.55,-594.94 1616.97,-589.52 1646,-570.5 1670.69,-554.33 1688.64,-524.65 1698.69,-504.5"/>
+<polygon fill="#757575" stroke="#757575" points="1701.97,-505.75 1703.1,-495.22 1695.65,-502.75 1701.97,-505.75"/>
+</g>
+<!-- crc32.js&#45;&gt;utils.js -->
+<g id="edge5" class="edge">
+<title>crc32.js&#45;&gt;utils.js</title>
+<path fill="none" stroke="#757575" d="M1553.55,-483.5C1586.01,-483.5 1637.3,-483.5 1671.78,-483.5"/>
+<polygon fill="#757575" stroke="#757575" points="1671.9,-487 1681.9,-483.5 1671.9,-480 1671.9,-487"/>
+</g>
+<!-- defaults.js -->
+<g id="node10" class="node">
+<title>defaults.js</title>
+<path fill="none" stroke="#cfffac" d="M121.33,-388C121.33,-388 59.67,-388 59.67,-388 55.83,-388 52,-384.17 52,-380.33 52,-380.33 52,-372.67 52,-372.67 52,-368.83 55.83,-365 59.67,-365 59.67,-365 121.33,-365 121.33,-365 125.17,-365 129,-368.83 129,-372.67 129,-372.67 129,-380.33 129,-380.33 129,-384.17 125.17,-388 121.33,-388"/>
+<text text-anchor="middle" x="90.5" y="-372.8" font-family="Arial" font-size="14.00" fill="#cfffac">defaults.js</text>
+</g>
+<!-- external.js -->
+<g id="node11" class="node">
+<title>external.js</title>
+<path fill="none" stroke="#cfffac" d="M121.83,-429C121.83,-429 59.17,-429 59.17,-429 55.33,-429 51.5,-425.17 51.5,-421.33 51.5,-421.33 51.5,-413.67 51.5,-413.67 51.5,-409.83 55.33,-406 59.17,-406 59.17,-406 121.83,-406 121.83,-406 125.67,-406 129.5,-409.83 129.5,-413.67 129.5,-413.67 129.5,-421.33 129.5,-421.33 129.5,-425.17 125.67,-429 121.83,-429"/>
+<text text-anchor="middle" x="90.5" y="-413.8" font-family="Arial" font-size="14.00" fill="#cfffac">external.js</text>
+</g>
+<!-- generate/ZipFileWorker.js -->
+<g id="node12" class="node">
+<title>generate/ZipFileWorker.js</title>
+<path fill="none" stroke="#c6c5fe" d="M1154.33,-490C1154.33,-490 999.67,-490 999.67,-490 995.83,-490 992,-486.17 992,-482.33 992,-482.33 992,-474.67 992,-474.67 992,-470.83 995.83,-467 999.67,-467 999.67,-467 1154.33,-467 1154.33,-467 1158.17,-467 1162,-470.83 1162,-474.67 1162,-474.67 1162,-482.33 1162,-482.33 1162,-486.17 1158.17,-490 1154.33,-490"/>
+<text text-anchor="middle" x="1077" y="-474.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">generate/ZipFileWorker.js</text>
+</g>
+<!-- generate/ZipFileWorker.js&#45;&gt;crc32.js -->
+<g id="edge7" class="edge">
+<title>generate/ZipFileWorker.js&#45;&gt;crc32.js</title>
+<path fill="none" stroke="#757575" d="M1162.11,-479.45C1257.3,-480.52 1408.45,-482.23 1480.38,-483.04"/>
+<polygon fill="#757575" stroke="#757575" points="1480.36,-486.54 1490.4,-483.15 1480.44,-479.54 1480.36,-486.54"/>
+</g>
+<!-- generate/ZipFileWorker.js&#45;&gt;utils.js -->
+<g id="edge9" class="edge">
+<title>generate/ZipFileWorker.js&#45;&gt;utils.js</title>
+<path fill="none" stroke="#757575" d="M1160.91,-466.95C1272.08,-453.58 1475.22,-436.64 1646,-463.5 1654.67,-464.86 1663.79,-467.24 1672.28,-469.89"/>
+<polygon fill="#757575" stroke="#757575" points="1671.3,-473.25 1681.89,-473.08 1673.5,-466.61 1671.3,-473.25"/>
+</g>
+<!-- utf8.js -->
+<g id="node13" class="node">
+<title>utf8.js</title>
+<path fill="none" stroke="#c6c5fe" d="M1301.83,-374C1301.83,-374 1263.17,-374 1263.17,-374 1259.33,-374 1255.5,-370.17 1255.5,-366.33 1255.5,-366.33 1255.5,-358.67 1255.5,-358.67 1255.5,-354.83 1259.33,-351 1263.17,-351 1263.17,-351 1301.83,-351 1301.83,-351 1305.67,-351 1309.5,-354.83 1309.5,-358.67 1309.5,-358.67 1309.5,-366.33 1309.5,-366.33 1309.5,-370.17 1305.67,-374 1301.83,-374"/>
+<text text-anchor="middle" x="1282.5" y="-358.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">utf8.js</text>
+</g>
+<!-- generate/ZipFileWorker.js&#45;&gt;utf8.js -->
+<g id="edge8" class="edge">
+<title>generate/ZipFileWorker.js&#45;&gt;utf8.js</title>
+<path fill="none" stroke="#757575" d="M1095.05,-466.9C1113.21,-454.39 1142.63,-433.7 1167,-414.5 1183.59,-401.44 1184.33,-393.37 1203,-383.5 1216.02,-376.62 1231.48,-371.86 1245.21,-368.63"/>
+<polygon fill="#757575" stroke="#757575" points="1246.28,-371.98 1255.32,-366.45 1244.8,-365.14 1246.28,-371.98"/>
+</g>
+<!-- utf8.js&#45;&gt;utils.js -->
+<g id="edge49" class="edge">
+<title>utf8.js&#45;&gt;utils.js</title>
+<path fill="none" stroke="#757575" d="M1309.66,-361.43C1383.03,-358.9 1589.11,-355.01 1646,-388.5 1674.11,-405.05 1691.8,-439.95 1700.79,-462.53"/>
+<polygon fill="#757575" stroke="#757575" points="1697.52,-463.79 1704.31,-471.93 1704.08,-461.34 1697.52,-463.79"/>
+</g>
+<!-- generate/index.js -->
+<g id="node14" class="node">
+<title>generate/index.js</title>
+<path fill="none" stroke="#c6c5fe" d="M884.83,-490C884.83,-490 781.17,-490 781.17,-490 777.33,-490 773.5,-486.17 773.5,-482.33 773.5,-482.33 773.5,-474.67 773.5,-474.67 773.5,-470.83 777.33,-467 781.17,-467 781.17,-467 884.83,-467 884.83,-467 888.67,-467 892.5,-470.83 892.5,-474.67 892.5,-474.67 892.5,-482.33 892.5,-482.33 892.5,-486.17 888.67,-490 884.83,-490"/>
+<text text-anchor="middle" x="833" y="-474.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">generate/index.js</text>
+</g>
+<!-- generate/index.js&#45;&gt;compressions.js -->
+<g id="edge10" class="edge">
+<title>generate/index.js&#45;&gt;compressions.js</title>
+<path fill="none" stroke="#757575" d="M857.64,-490.03C901.11,-511.22 993.77,-556.4 1043.25,-580.53"/>
+<polygon fill="#757575" stroke="#757575" points="1041.74,-583.69 1052.26,-584.92 1044.81,-577.4 1041.74,-583.69"/>
+</g>
+<!-- generate/index.js&#45;&gt;generate/ZipFileWorker.js -->
+<g id="edge11" class="edge">
+<title>generate/index.js&#45;&gt;generate/ZipFileWorker.js</title>
+<path fill="none" stroke="#757575" d="M892.58,-478.5C919.35,-478.5 951.73,-478.5 981.79,-478.5"/>
+<polygon fill="#757575" stroke="#757575" points="981.89,-482 991.89,-478.5 981.89,-475 981.89,-482"/>
+</g>
+<!-- index.js -->
+<g id="node15" class="node">
+<title>index.js</title>
+<path fill="none" stroke="#c6c5fe" d="M113.83,-347C113.83,-347 67.17,-347 67.17,-347 63.33,-347 59.5,-343.17 59.5,-339.33 59.5,-339.33 59.5,-331.67 59.5,-331.67 59.5,-327.83 63.33,-324 67.17,-324 67.17,-324 113.83,-324 113.83,-324 117.67,-324 121.5,-327.83 121.5,-331.67 121.5,-331.67 121.5,-339.33 121.5,-339.33 121.5,-343.17 117.67,-347 113.83,-347"/>
+<text text-anchor="middle" x="90.5" y="-331.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">index.js</text>
+</g>
+<!-- load.js -->
+<g id="node16" class="node">
+<title>load.js</title>
+<path fill="none" stroke="#c6c5fe" d="M265.33,-416C265.33,-416 224.67,-416 224.67,-416 220.83,-416 217,-412.17 217,-408.33 217,-408.33 217,-400.67 217,-400.67 217,-396.83 220.83,-393 224.67,-393 224.67,-393 265.33,-393 265.33,-393 269.17,-393 273,-396.83 273,-400.67 273,-400.67 273,-408.33 273,-408.33 273,-412.17 269.17,-416 265.33,-416"/>
+<text text-anchor="middle" x="245" y="-400.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">load.js</text>
+</g>
+<!-- index.js&#45;&gt;load.js -->
+<g id="edge12" class="edge">
+<title>index.js&#45;&gt;load.js</title>
+<path fill="none" stroke="#757575" d="M121.56,-339.32C139.45,-342.37 162.23,-347.65 181,-356.5 196.81,-363.96 212.46,-375.85 224.2,-385.93"/>
+<polygon fill="#757575" stroke="#757575" points="222.21,-388.84 232.01,-392.86 226.85,-383.6 222.21,-388.84"/>
+</g>
+<!-- object.js -->
+<g id="node17" class="node">
+<title>object.js</title>
+<path fill="none" stroke="#c6c5fe" d="M638.83,-240C638.83,-240 588.17,-240 588.17,-240 584.33,-240 580.5,-236.17 580.5,-232.33 580.5,-232.33 580.5,-224.67 580.5,-224.67 580.5,-220.83 584.33,-217 588.17,-217 588.17,-217 638.83,-217 638.83,-217 642.67,-217 646.5,-220.83 646.5,-224.67 646.5,-224.67 646.5,-232.33 646.5,-232.33 646.5,-236.17 642.67,-240 638.83,-240"/>
+<text text-anchor="middle" x="613.5" y="-224.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">object.js</text>
+</g>
+<!-- index.js&#45;&gt;object.js -->
+<g id="edge13" class="edge">
+<title>index.js&#45;&gt;object.js</title>
+<path fill="none" stroke="#757575" d="M121.76,-329.28C210.16,-311.13 468.14,-258.15 570.22,-237.18"/>
+<polygon fill="#757575" stroke="#757575" points="571.08,-240.58 580.17,-235.14 569.67,-233.72 571.08,-240.58"/>
+</g>
+<!-- load.js&#45;&gt;stream/Crc32Probe.js -->
+<g id="edge14" class="edge">
+<title>load.js&#45;&gt;stream/Crc32Probe.js</title>
+<path fill="none" stroke="#757575" d="M273.16,-404.47C405.01,-404.35 980.76,-403.79 1198.06,-403.58"/>
+<polygon fill="#757575" stroke="#757575" points="1198.14,-407.08 1208.14,-403.57 1198.14,-400.08 1198.14,-407.08"/>
+</g>
+<!-- load.js&#45;&gt;utils.js -->
+<g id="edge16" class="edge">
+<title>load.js&#45;&gt;utils.js</title>
+<path fill="none" stroke="#757575" d="M273.2,-411.94C344.82,-431.09 544.77,-481.85 715,-499.5 1016.95,-530.8 1094.44,-503.39 1398,-504.5 1508.22,-504.9 1537.24,-522.42 1646,-504.5 1654.7,-503.07 1663.83,-500.57 1672.32,-497.79"/>
+<polygon fill="#757575" stroke="#757575" points="1673.64,-501.04 1681.94,-494.44 1671.34,-494.43 1673.64,-501.04"/>
+</g>
+<!-- load.js&#45;&gt;utf8.js -->
+<g id="edge15" class="edge">
+<title>load.js&#45;&gt;utf8.js</title>
+<path fill="none" stroke="#757575" d="M273.18,-396.1C315.32,-383.84 399.17,-362.5 472,-362.5 472,-362.5 472,-362.5 834,-362.5 986.85,-362.5 1169.08,-362.5 1245.24,-362.5"/>
+<polygon fill="#757575" stroke="#757575" points="1245.27,-366 1255.27,-362.5 1245.27,-359 1245.27,-366"/>
+</g>
+<!-- zipEntries.js -->
+<g id="node19" class="node">
+<title>zipEntries.js</title>
+<path fill="none" stroke="#c6c5fe" d="M390.33,-648C390.33,-648 316.67,-648 316.67,-648 312.83,-648 309,-644.17 309,-640.33 309,-640.33 309,-632.67 309,-632.67 309,-628.83 312.83,-625 316.67,-625 316.67,-625 390.33,-625 390.33,-625 394.17,-625 398,-628.83 398,-632.67 398,-632.67 398,-640.33 398,-640.33 398,-644.17 394.17,-648 390.33,-648"/>
+<text text-anchor="middle" x="353.5" y="-632.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">zipEntries.js</text>
+</g>
+<!-- load.js&#45;&gt;zipEntries.js -->
+<g id="edge17" class="edge">
+<title>load.js&#45;&gt;zipEntries.js</title>
+<path fill="none" stroke="#757575" d="M251.28,-416C268.17,-452.8 321.05,-568 342.9,-615.6"/>
+<polygon fill="#757575" stroke="#757575" points="339.83,-617.3 347.19,-624.93 346.2,-614.38 339.83,-617.3"/>
+</g>
+<!-- object.js&#45;&gt;compressedObject.js -->
+<g id="edge20" class="edge">
+<title>object.js&#45;&gt;compressedObject.js</title>
+<path fill="none" stroke="#757575" d="M646.59,-232.39C717.9,-241.05 891.41,-262.1 994.73,-274.64"/>
+<polygon fill="#757575" stroke="#757575" points="994.46,-278.13 1004.81,-275.86 995.31,-271.18 994.46,-278.13"/>
+</g>
+<!-- object.js&#45;&gt;utils.js -->
+<g id="edge25" class="edge">
+<title>object.js&#45;&gt;utils.js</title>
+<path fill="none" stroke="#757575" d="M637.38,-216.91C657.32,-207.43 687.31,-194.61 715,-188.5 871.92,-153.89 915.31,-168.5 1076,-168.5 1076,-168.5 1076,-168.5 1283.5,-168.5 1448.68,-168.5 1527.02,-135.92 1646,-250.5 1676.62,-279.99 1697.68,-409.34 1705.07,-461.54"/>
+<polygon fill="#757575" stroke="#757575" points="1701.64,-462.29 1706.48,-471.71 1708.58,-461.33 1701.64,-462.29"/>
+</g>
+<!-- object.js&#45;&gt;utf8.js -->
+<g id="edge24" class="edge">
+<title>object.js&#45;&gt;utf8.js</title>
+<path fill="none" stroke="#757575" d="M639.18,-240.05C659.22,-249 688.41,-261.09 715,-268.5 816.54,-296.78 1136.85,-342.48 1245.24,-357.52"/>
+<polygon fill="#757575" stroke="#757575" points="1244.96,-361.02 1255.35,-358.92 1245.92,-354.08 1244.96,-361.02"/>
+</g>
+<!-- object.js&#45;&gt;generate/index.js -->
+<g id="edge21" class="edge">
+<title>object.js&#45;&gt;generate/index.js</title>
+<path fill="none" stroke="#757575" d="M624.7,-240.22C659.23,-279.92 772.01,-409.55 815.08,-459.05"/>
+<polygon fill="#757575" stroke="#757575" points="812.62,-461.56 821.82,-466.8 817.9,-456.96 812.62,-461.56"/>
+</g>
+<!-- nodejs/NodejsStreamInputAdapter.js -->
+<g id="node20" class="node">
+<title>nodejs/NodejsStreamInputAdapter.js</title>
+<path fill="none" stroke="#c6c5fe" d="M943.33,-123C943.33,-123 722.67,-123 722.67,-123 718.83,-123 715,-119.17 715,-115.33 715,-115.33 715,-107.67 715,-107.67 715,-103.83 718.83,-100 722.67,-100 722.67,-100 943.33,-100 943.33,-100 947.17,-100 951,-103.83 951,-107.67 951,-107.67 951,-115.33 951,-115.33 951,-119.17 947.17,-123 943.33,-123"/>
+<text text-anchor="middle" x="833" y="-107.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">nodejs/NodejsStreamInputAdapter.js</text>
+</g>
+<!-- object.js&#45;&gt;nodejs/NodejsStreamInputAdapter.js -->
+<g id="edge22" class="edge">
+<title>object.js&#45;&gt;nodejs/NodejsStreamInputAdapter.js</title>
+<path fill="none" stroke="#757575" d="M627.66,-216.9C646.11,-201.09 681.22,-172.67 715,-154.5 736.24,-143.08 761.16,-133.45 782.53,-126.22"/>
+<polygon fill="#757575" stroke="#757575" points="783.67,-129.53 792.07,-123.07 781.47,-122.88 783.67,-129.53"/>
+</g>
+<!-- stream/StreamHelper.js -->
+<g id="node23" class="node">
+<title>stream/StreamHelper.js</title>
+<path fill="none" stroke="#c6c5fe" d="M1354.33,-66C1354.33,-66 1210.67,-66 1210.67,-66 1206.83,-66 1203,-62.17 1203,-58.33 1203,-58.33 1203,-50.67 1203,-50.67 1203,-46.83 1206.83,-43 1210.67,-43 1210.67,-43 1354.33,-43 1354.33,-43 1358.17,-43 1362,-46.83 1362,-50.67 1362,-50.67 1362,-58.33 1362,-58.33 1362,-62.17 1358.17,-66 1354.33,-66"/>
+<text text-anchor="middle" x="1282.5" y="-50.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">stream/StreamHelper.js</text>
+</g>
+<!-- object.js&#45;&gt;stream/StreamHelper.js -->
+<g id="edge23" class="edge">
+<title>object.js&#45;&gt;stream/StreamHelper.js</title>
+<path fill="none" stroke="#757575" d="M618.86,-216.93C629.97,-189.27 662.27,-119.47 715,-90.5 875.6,-2.26 1101.77,-23.21 1213.53,-41.29"/>
+<polygon fill="#757575" stroke="#757575" points="1213.22,-44.78 1223.66,-42.97 1214.37,-37.88 1213.22,-44.78"/>
+</g>
+<!-- zipObject.js -->
+<g id="node24" class="node">
+<title>zipObject.js</title>
+<path fill="none" stroke="#c6c5fe" d="M868.83,-220C868.83,-220 797.17,-220 797.17,-220 793.33,-220 789.5,-216.17 789.5,-212.33 789.5,-212.33 789.5,-204.67 789.5,-204.67 789.5,-200.83 793.33,-197 797.17,-197 797.17,-197 868.83,-197 868.83,-197 872.67,-197 876.5,-200.83 876.5,-204.67 876.5,-204.67 876.5,-212.33 876.5,-212.33 876.5,-216.17 872.67,-220 868.83,-220"/>
+<text text-anchor="middle" x="833" y="-204.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">zipObject.js</text>
+</g>
+<!-- object.js&#45;&gt;zipObject.js -->
+<g id="edge26" class="edge">
+<title>object.js&#45;&gt;zipObject.js</title>
+<path fill="none" stroke="#757575" d="M646.59,-225.55C681.45,-222.34 737.69,-217.17 779.1,-213.36"/>
+<polygon fill="#757575" stroke="#757575" points="779.65,-216.83 789.29,-212.43 779.01,-209.86 779.65,-216.83"/>
+</g>
+<!-- license_header.js -->
+<g id="node18" class="node">
+<title>license_header.js</title>
+<path fill="none" stroke="#cfffac" d="M143.33,-470C143.33,-470 37.67,-470 37.67,-470 33.83,-470 30,-466.17 30,-462.33 30,-462.33 30,-454.67 30,-454.67 30,-450.83 33.83,-447 37.67,-447 37.67,-447 143.33,-447 143.33,-447 147.17,-447 151,-450.83 151,-454.67 151,-454.67 151,-462.33 151,-462.33 151,-466.17 147.17,-470 143.33,-470"/>
+<text text-anchor="middle" x="90.5" y="-454.8" font-family="Arial" font-size="14.00" fill="#cfffac">license_header.js</text>
+</g>
+<!-- zipEntries.js&#45;&gt;utils.js -->
+<g id="edge51" class="edge">
+<title>zipEntries.js&#45;&gt;utils.js</title>
+<path fill="none" stroke="#757575" d="M358.78,-648.1C378.66,-699.78 467.27,-906.5 612.5,-906.5 612.5,-906.5 612.5,-906.5 1283.5,-906.5 1444.91,-906.5 1526.7,-993.22 1646,-884.5 1702.34,-833.16 1707.73,-582.1 1708.05,-505.64"/>
+<polygon fill="#757575" stroke="#757575" points="1711.55,-505.3 1708.07,-495.3 1704.55,-505.29 1711.55,-505.3"/>
+</g>
+<!-- reader/readerFor.js -->
+<g id="node31" class="node">
+<title>reader/readerFor.js</title>
+<path fill="none" stroke="#c6c5fe" d="M671.33,-728C671.33,-728 555.67,-728 555.67,-728 551.83,-728 548,-724.17 548,-720.33 548,-720.33 548,-712.67 548,-712.67 548,-708.83 551.83,-705 555.67,-705 555.67,-705 671.33,-705 671.33,-705 675.17,-705 679,-708.83 679,-712.67 679,-712.67 679,-720.33 679,-720.33 679,-724.17 675.17,-728 671.33,-728"/>
+<text text-anchor="middle" x="613.5" y="-712.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">reader/readerFor.js</text>
+</g>
+<!-- zipEntries.js&#45;&gt;reader/readerFor.js -->
+<g id="edge50" class="edge">
+<title>zipEntries.js&#45;&gt;reader/readerFor.js</title>
+<path fill="none" stroke="#757575" d="M391.74,-648.05C437.66,-662.29 515.74,-686.5 565.68,-701.98"/>
+<polygon fill="#757575" stroke="#757575" points="564.68,-705.34 575.27,-704.96 566.76,-698.65 564.68,-705.34"/>
+</g>
+<!-- zipEntry.js -->
+<g id="node36" class="node">
+<title>zipEntry.js</title>
+<path fill="none" stroke="#c6c5fe" d="M504.33,-608C504.33,-608 441.67,-608 441.67,-608 437.83,-608 434,-604.17 434,-600.33 434,-600.33 434,-592.67 434,-592.67 434,-588.83 437.83,-585 441.67,-585 441.67,-585 504.33,-585 504.33,-585 508.17,-585 512,-588.83 512,-592.67 512,-592.67 512,-600.33 512,-600.33 512,-604.17 508.17,-608 504.33,-608"/>
+<text text-anchor="middle" x="473" y="-592.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">zipEntry.js</text>
+</g>
+<!-- zipEntries.js&#45;&gt;zipEntry.js -->
+<g id="edge52" class="edge">
+<title>zipEntries.js&#45;&gt;zipEntry.js</title>
+<path fill="none" stroke="#757575" d="M388.49,-624.93C400.91,-620.7 415.14,-615.86 428.29,-611.38"/>
+<polygon fill="#757575" stroke="#757575" points="429.75,-614.58 438.09,-608.04 427.5,-607.95 429.75,-614.58"/>
+</g>
+<!-- nodejs/NodejsStreamInputAdapter.js&#45;&gt;utils.js -->
+<g id="edge18" class="edge">
+<title>nodejs/NodejsStreamInputAdapter.js&#45;&gt;utils.js</title>
+<path fill="none" stroke="#757575" d="M951.31,-109.85C1163.75,-108 1595,-109.88 1646,-154.5 1692.42,-195.11 1704.48,-394.49 1707.25,-461.49"/>
+<polygon fill="#757575" stroke="#757575" points="1703.77,-461.87 1707.65,-471.73 1710.76,-461.61 1703.77,-461.87"/>
+</g>
+<!-- nodejs/NodejsStreamOutputAdapter.js -->
+<g id="node21" class="node">
+<title>nodejs/NodejsStreamOutputAdapter.js</title>
+<path fill="none" stroke="#c6c5fe" d="M1638.33,-64C1638.33,-64 1405.67,-64 1405.67,-64 1401.83,-64 1398,-60.17 1398,-56.33 1398,-56.33 1398,-48.67 1398,-48.67 1398,-44.83 1401.83,-41 1405.67,-41 1405.67,-41 1638.33,-41 1638.33,-41 1642.17,-41 1646,-44.83 1646,-48.67 1646,-48.67 1646,-56.33 1646,-56.33 1646,-60.17 1642.17,-64 1638.33,-64"/>
+<text text-anchor="middle" x="1522" y="-48.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">nodejs/NodejsStreamOutputAdapter.js</text>
+</g>
+<!-- nodejs/NodejsStreamOutputAdapter.js&#45;&gt;utils.js -->
+<g id="edge19" class="edge">
+<title>nodejs/NodejsStreamOutputAdapter.js&#45;&gt;utils.js</title>
+<path fill="none" stroke="#757575" d="M1626.86,-64.14C1633.94,-67.92 1640.45,-72.63 1646,-78.5 1698.86,-134.43 1706.66,-385.18 1707.8,-461.43"/>
+<polygon fill="#757575" stroke="#757575" points="1704.31,-461.79 1707.93,-471.75 1711.31,-461.7 1704.31,-461.79"/>
+</g>
+<!-- nodejsUtils.js -->
+<g id="node22" class="node">
+<title>nodejsUtils.js</title>
+<path fill="none" stroke="#cfffac" d="M130.83,-511C130.83,-511 50.17,-511 50.17,-511 46.33,-511 42.5,-507.17 42.5,-503.33 42.5,-503.33 42.5,-495.67 42.5,-495.67 42.5,-491.83 46.33,-488 50.17,-488 50.17,-488 130.83,-488 130.83,-488 134.67,-488 138.5,-491.83 138.5,-495.67 138.5,-495.67 138.5,-503.33 138.5,-503.33 138.5,-507.17 134.67,-511 130.83,-511"/>
+<text text-anchor="middle" x="90.5" y="-495.8" font-family="Arial" font-size="14.00" fill="#cfffac">nodejsUtils.js</text>
+</g>
+<!-- stream/StreamHelper.js&#45;&gt;utils.js -->
+<g id="edge48" class="edge">
+<title>stream/StreamHelper.js&#45;&gt;utils.js</title>
+<path fill="none" stroke="#757575" d="M1354.06,-66.05C1458.79,-83.26 1643.42,-113.91 1646,-116.5 1694.96,-165.56 1705.4,-390.11 1707.5,-461.58"/>
+<polygon fill="#757575" stroke="#757575" points="1704,-461.79 1707.77,-471.69 1711,-461.6 1704,-461.79"/>
+</g>
+<!-- stream/StreamHelper.js&#45;&gt;nodejs/NodejsStreamOutputAdapter.js -->
+<g id="edge46" class="edge">
+<title>stream/StreamHelper.js&#45;&gt;nodejs/NodejsStreamOutputAdapter.js</title>
+<path fill="none" stroke="#757575" d="M1362.25,-53.84C1370.5,-53.77 1379.04,-53.7 1387.69,-53.62"/>
+<polygon fill="#757575" stroke="#757575" points="1387.91,-57.12 1397.88,-53.54 1387.85,-50.12 1387.91,-57.12"/>
+</g>
+<!-- stream/ConvertWorker.js -->
+<g id="node33" class="node">
+<title>stream/ConvertWorker.js</title>
+<path fill="none" stroke="#c6c5fe" d="M1597.33,-23C1597.33,-23 1446.67,-23 1446.67,-23 1442.83,-23 1439,-19.17 1439,-15.33 1439,-15.33 1439,-7.67 1439,-7.67 1439,-3.83 1442.83,0 1446.67,0 1446.67,0 1597.33,0 1597.33,0 1601.17,0 1605,-3.83 1605,-7.67 1605,-7.67 1605,-15.33 1605,-15.33 1605,-19.17 1601.17,-23 1597.33,-23"/>
+<text text-anchor="middle" x="1522" y="-7.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">stream/ConvertWorker.js</text>
+</g>
+<!-- stream/StreamHelper.js&#45;&gt;stream/ConvertWorker.js -->
+<g id="edge47" class="edge">
+<title>stream/StreamHelper.js&#45;&gt;stream/ConvertWorker.js</title>
+<path fill="none" stroke="#757575" d="M1338.75,-42.97C1357.52,-39.17 1378.65,-35.02 1398,-31.5 1410.72,-29.19 1424.21,-26.87 1437.41,-24.66"/>
+<polygon fill="#757575" stroke="#757575" points="1438.13,-28.09 1447.43,-23.01 1436.99,-21.19 1438.13,-28.09"/>
+</g>
+<!-- zipObject.js&#45;&gt;compressedObject.js -->
+<g id="edge59" class="edge">
+<title>zipObject.js&#45;&gt;compressedObject.js</title>
+<path fill="none" stroke="#757575" d="M876.59,-214.08C899.3,-218.01 927.36,-224.41 951,-234.5 968.69,-242.05 969.57,-250.38 987,-258.5 996.4,-262.88 1006.7,-266.7 1016.86,-269.97"/>
+<polygon fill="#757575" stroke="#757575" points="1015.93,-273.35 1026.52,-272.93 1017.99,-266.65 1015.93,-273.35"/>
+</g>
+<!-- zipObject.js&#45;&gt;stream/DataWorker.js -->
+<g id="edge60" class="edge">
+<title>zipObject.js&#45;&gt;stream/DataWorker.js</title>
+<path fill="none" stroke="#757575" d="M876.54,-206.49C940.6,-204.53 1065.15,-204.91 1167,-230.5 1197.57,-238.18 1230.08,-253.74 1252.61,-265.84"/>
+<polygon fill="#757575" stroke="#757575" points="1251.18,-269.05 1261.63,-270.79 1254.54,-262.91 1251.18,-269.05"/>
+</g>
+<!-- zipObject.js&#45;&gt;utf8.js -->
+<g id="edge62" class="edge">
+<title>zipObject.js&#45;&gt;utf8.js</title>
+<path fill="none" stroke="#757575" d="M876.56,-214.91C963.47,-228.1 1154.29,-257.62 1167,-264.5 1187.75,-275.73 1185.5,-287.68 1203,-303.5 1219.75,-318.64 1240.19,-333.91 1256.01,-345.12"/>
+<polygon fill="#757575" stroke="#757575" points="1254.07,-348.04 1264.27,-350.91 1258.08,-342.3 1254.07,-348.04"/>
+</g>
+<!-- zipObject.js&#45;&gt;stream/StreamHelper.js -->
+<g id="edge61" class="edge">
+<title>zipObject.js&#45;&gt;stream/StreamHelper.js</title>
+<path fill="none" stroke="#757575" d="M867.64,-196.92C947.67,-169.38 1149.36,-99.97 1238.27,-69.38"/>
+<polygon fill="#757575" stroke="#757575" points="1239.46,-72.67 1247.78,-66.1 1237.19,-66.05 1239.46,-72.67"/>
+</g>
+<!-- readable&#45;stream&#45;browser.js -->
+<g id="node25" class="node">
+<title>readable&#45;stream&#45;browser.js</title>
+<path fill="none" stroke="#cfffac" d="M173.33,-552C173.33,-552 7.67,-552 7.67,-552 3.83,-552 0,-548.17 0,-544.33 0,-544.33 0,-536.67 0,-536.67 0,-532.83 3.83,-529 7.67,-529 7.67,-529 173.33,-529 173.33,-529 177.17,-529 181,-532.83 181,-536.67 181,-536.67 181,-544.33 181,-544.33 181,-548.17 177.17,-552 173.33,-552"/>
+<text text-anchor="middle" x="90.5" y="-536.8" font-family="Arial" font-size="14.00" fill="#cfffac">readable&#45;stream&#45;browser.js</text>
+</g>
+<!-- reader/ArrayReader.js -->
+<g id="node26" class="node">
+<title>reader/ArrayReader.js</title>
+<path fill="none" stroke="#c6c5fe" d="M1349.33,-802C1349.33,-802 1215.67,-802 1215.67,-802 1211.83,-802 1208,-798.17 1208,-794.33 1208,-794.33 1208,-786.67 1208,-786.67 1208,-782.83 1211.83,-779 1215.67,-779 1215.67,-779 1349.33,-779 1349.33,-779 1353.17,-779 1357,-782.83 1357,-786.67 1357,-786.67 1357,-794.33 1357,-794.33 1357,-798.17 1353.17,-802 1349.33,-802"/>
+<text text-anchor="middle" x="1282.5" y="-786.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">reader/ArrayReader.js</text>
+</g>
+<!-- reader/ArrayReader.js&#45;&gt;utils.js -->
+<g id="edge28" class="edge">
+<title>reader/ArrayReader.js&#45;&gt;utils.js</title>
+<path fill="none" stroke="#757575" d="M1357.26,-795.2C1450.61,-799.5 1604.29,-800.54 1646,-764.5 1685.42,-730.44 1701.76,-565.37 1706.46,-505.36"/>
+<polygon fill="#757575" stroke="#757575" points="1709.97,-505.37 1707.23,-495.13 1702.99,-504.84 1709.97,-505.37"/>
+</g>
+<!-- reader/DataReader.js -->
+<g id="node27" class="node">
+<title>reader/DataReader.js</title>
+<path fill="none" stroke="#c6c5fe" d="M1586.83,-840C1586.83,-840 1457.17,-840 1457.17,-840 1453.33,-840 1449.5,-836.17 1449.5,-832.33 1449.5,-832.33 1449.5,-824.67 1449.5,-824.67 1449.5,-820.83 1453.33,-817 1457.17,-817 1457.17,-817 1586.83,-817 1586.83,-817 1590.67,-817 1594.5,-820.83 1594.5,-824.67 1594.5,-824.67 1594.5,-832.33 1594.5,-832.33 1594.5,-836.17 1590.67,-840 1586.83,-840"/>
+<text text-anchor="middle" x="1522" y="-824.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">reader/DataReader.js</text>
+</g>
+<!-- reader/ArrayReader.js&#45;&gt;reader/DataReader.js -->
+<g id="edge27" class="edge">
+<title>reader/ArrayReader.js&#45;&gt;reader/DataReader.js</title>
+<path fill="none" stroke="#757575" d="M1355.4,-802C1381.91,-806.25 1412.13,-811.08 1439.43,-815.45"/>
+<polygon fill="#757575" stroke="#757575" points="1438.91,-818.91 1449.34,-817.03 1440.02,-812 1438.91,-818.91"/>
+</g>
+<!-- reader/DataReader.js&#45;&gt;utils.js -->
+<g id="edge29" class="edge">
+<title>reader/DataReader.js&#45;&gt;utils.js</title>
+<path fill="none" stroke="#757575" d="M1594.67,-827.36C1613.57,-823.63 1632.45,-816.27 1646,-802.5 1687.8,-760.01 1702.84,-570.31 1706.81,-505.38"/>
+<polygon fill="#757575" stroke="#757575" points="1710.33,-505.28 1707.41,-495.1 1703.34,-504.87 1710.33,-505.28"/>
+</g>
+<!-- reader/NodeBufferReader.js -->
+<g id="node28" class="node">
+<title>reader/NodeBufferReader.js</title>
+<path fill="none" stroke="#c6c5fe" d="M917.83,-688C917.83,-688 748.17,-688 748.17,-688 744.33,-688 740.5,-684.17 740.5,-680.33 740.5,-680.33 740.5,-672.67 740.5,-672.67 740.5,-668.83 744.33,-665 748.17,-665 748.17,-665 917.83,-665 917.83,-665 921.67,-665 925.5,-668.83 925.5,-672.67 925.5,-672.67 925.5,-680.33 925.5,-680.33 925.5,-684.17 921.67,-688 917.83,-688"/>
+<text text-anchor="middle" x="833" y="-672.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">reader/NodeBufferReader.js</text>
+</g>
+<!-- reader/NodeBufferReader.js&#45;&gt;utils.js -->
+<g id="edge31" class="edge">
+<title>reader/NodeBufferReader.js&#45;&gt;utils.js</title>
+<path fill="none" stroke="#757575" d="M925.72,-679.23C1125.93,-684.4 1588.79,-691.66 1646,-650.5 1693.09,-616.62 1704.46,-542.72 1707.17,-505.64"/>
+<polygon fill="#757575" stroke="#757575" points="1710.68,-505.59 1707.77,-495.4 1703.69,-505.18 1710.68,-505.59"/>
+</g>
+<!-- reader/Uint8ArrayReader.js -->
+<g id="node29" class="node">
+<title>reader/Uint8ArrayReader.js</title>
+<path fill="none" stroke="#c6c5fe" d="M1159.33,-726C1159.33,-726 994.67,-726 994.67,-726 990.83,-726 987,-722.17 987,-718.33 987,-718.33 987,-710.67 987,-710.67 987,-706.83 990.83,-703 994.67,-703 994.67,-703 1159.33,-703 1159.33,-703 1163.17,-703 1167,-706.83 1167,-710.67 1167,-710.67 1167,-718.33 1167,-718.33 1167,-722.17 1163.17,-726 1159.33,-726"/>
+<text text-anchor="middle" x="1077" y="-710.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">reader/Uint8ArrayReader.js</text>
+</g>
+<!-- reader/NodeBufferReader.js&#45;&gt;reader/Uint8ArrayReader.js -->
+<g id="edge30" class="edge">
+<title>reader/NodeBufferReader.js&#45;&gt;reader/Uint8ArrayReader.js</title>
+<path fill="none" stroke="#757575" d="M907.26,-688C934.15,-692.23 964.78,-697.04 992.5,-701.39"/>
+<polygon fill="#757575" stroke="#757575" points="992.15,-704.88 1002.57,-702.97 993.23,-697.96 992.15,-704.88"/>
+</g>
+<!-- reader/Uint8ArrayReader.js&#45;&gt;utils.js -->
+<g id="edge35" class="edge">
+<title>reader/Uint8ArrayReader.js&#45;&gt;utils.js</title>
+<path fill="none" stroke="#757575" d="M1167.07,-722.47C1307.97,-733.13 1573.75,-744.74 1646,-688.5 1703.1,-644.05 1709.22,-548.71 1708.86,-505.54"/>
+<polygon fill="#757575" stroke="#757575" points="1712.36,-505.22 1708.65,-495.3 1705.36,-505.37 1712.36,-505.22"/>
+</g>
+<!-- reader/Uint8ArrayReader.js&#45;&gt;reader/ArrayReader.js -->
+<g id="edge34" class="edge">
+<title>reader/Uint8ArrayReader.js&#45;&gt;reader/ArrayReader.js</title>
+<path fill="none" stroke="#757575" d="M1108.91,-726.04C1144.27,-739.25 1202.3,-760.92 1241.19,-775.44"/>
+<polygon fill="#757575" stroke="#757575" points="1240.04,-778.75 1250.64,-778.97 1242.49,-772.2 1240.04,-778.75"/>
+</g>
+<!-- reader/StringReader.js -->
+<g id="node30" class="node">
+<title>reader/StringReader.js</title>
+<path fill="none" stroke="#c6c5fe" d="M1350.83,-843C1350.83,-843 1214.17,-843 1214.17,-843 1210.33,-843 1206.5,-839.17 1206.5,-835.33 1206.5,-835.33 1206.5,-827.67 1206.5,-827.67 1206.5,-823.83 1210.33,-820 1214.17,-820 1214.17,-820 1350.83,-820 1350.83,-820 1354.67,-820 1358.5,-823.83 1358.5,-827.67 1358.5,-827.67 1358.5,-835.33 1358.5,-835.33 1358.5,-839.17 1354.67,-843 1350.83,-843"/>
+<text text-anchor="middle" x="1282.5" y="-827.8" font-family="Arial" font-size="14.00" fill="#c6c5fe">reader/StringReader.js</text>
+</g>
+<!-- reader/StringReader.js&#45;&gt;utils.js -->
+<g id="edge33" class="edge">
+<title>reader/StringReader.js&#45;&gt;utils.js</title>
+<path fill="none" stroke="#757575" d="M1308.71,-843.08C1373.39,-870.87 1547.09,-933.07 1646,-849.5 1698.92,-804.79 1706.68,-577.03 1707.81,-505.22"/>
+<polygon fill="#757575" stroke="#757575" points="1711.31,-505.13 1707.94,-495.08 1704.31,-505.04 1711.31,-505.13"/>
+</g>
+<!-- reader/StringReader.js&#45;&gt;reader/DataReader.js -->
+<g id="edge32" class="edge">
+<title>reader/StringReader.js&#45;&gt;reader/DataReader.js</title>
+<path fill="none" stroke="#757575" d="M1358.65,-830.55C1384.2,-830.23 1412.88,-829.87 1438.95,-829.54"/>
+<polygon fill="#757575" stroke="#757575" points="1439.22,-833.03 1449.18,-829.41 1439.13,-826.03 1439.22,-833.03"/>
+</g>
+<!-- reader/readerFor.js&#45;&gt;utils.js -->
+<g id="edge40" class="edge">
+<title>reader/readerFor.js&#45;&gt;utils.js</title>
+<path fill="none" stroke="#757575" d="M679,-725.88C690.95,-727.32 703.33,-728.62 715,-729.5 835.58,-738.6 866.09,-733.89 987,-735.5 1060.23,-736.48 1589.14,-769.66 1646,-723.5 1680.16,-695.77 1699.3,-559.25 1705.63,-505.4"/>
+<polygon fill="#757575" stroke="#757575" points="1709.13,-505.56 1706.78,-495.23 1702.17,-504.77 1709.13,-505.56"/>
+</g>
+<!-- reader/readerFor.js&#45;&gt;reader/ArrayReader.js -->
+<g id="edge36" class="edge">
+<title>reader/readerFor.js&#45;&gt;reader/ArrayReader.js</title>
+<path fill="none" stroke="#757575" d="M631.34,-728.18C650.57,-740.78 683.47,-760.17 715,-768.5 881.57,-812.48 1085.19,-806.6 1197.33,-798.49"/>
+<polygon fill="#757575" stroke="#757575" points="1197.85,-801.96 1207.56,-797.72 1197.33,-794.98 1197.85,-801.96"/>
+</g>
+<!-- reader/readerFor.js&#45;&gt;reader/NodeBufferReader.js -->
+<g id="edge37" class="edge">
+<title>reader/readerFor.js&#45;&gt;reader/NodeBufferReader.js</title>
+<path fill="none" stroke="#757575" d="M677.12,-704.98C702.82,-700.26 732.75,-694.75 759.41,-689.85"/>
+<polygon fill="#757575" stroke="#757575" points="760.25,-693.26 769.45,-688 758.98,-686.37 760.25,-693.26"/>
+</g>
+<!-- reader/readerFor.js&#45;&gt;reader/Uint8ArrayReader.js -->
+<g id="edge39" class="edge">
+<title>reader/readerFor.js&#45;&gt;reader/Uint8ArrayReader.js</title>
+<path fill="none" stroke="#757575" d="M679.16,-716.22C756.43,-715.88 886.56,-715.32 976.64,-714.93"/>
+<polygon fill="#757575" stroke="#757575" points="976.88,-718.43 986.87,-714.89 976.85,-711.43 976.88,-718.43"/>
+</g>
+<!-- reader/readerFor.js&#45;&gt;reader/StringReader.js -->
+<g id="edge38" class="edge">
+<title>reader/readerFor.js&#45;&gt;reader/StringReader.js</title>
+<path fill="none" stroke="#757575" d="M624.19,-728.1C640.63,-746.82 676.03,-783.15 715,-798.5 875.83,-861.85 1082.3,-853.86 1196.31,-842.57"/>
+<polygon fill="#757575" stroke="#757575" points="1196.79,-846.04 1206.38,-841.54 1196.07,-839.07 1196.79,-846.04"/>
+</g>
+<!-- signature.js -->
+<g id="node32" class="node">
+<title>signature.js</title>
+<path fill="none" stroke="#cfffac" d="M125.83,-593C125.83,-593 55.17,-593 55.17,-593 51.33,-593 47.5,-589.17 47.5,-585.33 47.5,-585.33 47.5,-577.67 47.5,-577.67 47.5,-573.83 51.33,-570 55.17,-570 55.17,-570 125.83,-570 125.83,-570 129.67,-570 133.5,-573.83 133.5,-577.67 133.5,-577.67 133.5,-585.33 133.5,-585.33 133.5,-589.17 129.67,-593 125.83,-593"/>
+<text text-anchor="middle" x="90.5" y="-577.8" font-family="Arial" font-size="14.00" fill="#cfffac">signature.js</text>
+</g>
+<!-- stream/ConvertWorker.js&#45;&gt;utils.js -->
+<g id="edge41" class="edge">
+<title>stream/ConvertWorker.js&#45;&gt;utils.js</title>
+<path fill="none" stroke="#757575" d="M1605.11,-10.29C1620.43,-13.8 1635.05,-20.27 1646,-31.5 1706.48,-93.51 1708.87,-379.48 1708.29,-461.47"/>
+<polygon fill="#757575" stroke="#757575" points="1704.79,-461.61 1708.19,-471.64 1711.78,-461.68 1704.79,-461.61"/>
+</g>
+<!-- stream/GenericWorker.js -->
+<g id="node34" class="node">
+<title>stream/GenericWorker.js</title>
+<path fill="none" stroke="#cfffac" d="M166.33,-634C166.33,-634 14.67,-634 14.67,-634 10.83,-634 7,-630.17 7,-626.33 7,-626.33 7,-618.67 7,-618.67 7,-614.83 10.83,-611 14.67,-611 14.67,-611 166.33,-611 166.33,-611 170.17,-611 174,-614.83 174,-618.67 174,-618.67 174,-626.33 174,-626.33 174,-630.17 170.17,-634 166.33,-634"/>
+<text text-anchor="middle" x="90.5" y="-618.8" font-family="Arial" font-size="14.00" fill="#cfffac">stream/GenericWorker.js</text>
+</g>
+<!-- support.js -->
+<g id="node35" class="node">
+<title>support.js</title>
+<path fill="none" stroke="#cfffac" d="M120.33,-675C120.33,-675 60.67,-675 60.67,-675 56.83,-675 53,-671.17 53,-667.33 53,-667.33 53,-659.67 53,-659.67 53,-655.83 56.83,-652 60.67,-652 60.67,-652 120.33,-652 120.33,-652 124.17,-652 128,-655.83 128,-659.67 128,-659.67 128,-667.33 128,-667.33 128,-671.17 124.17,-675 120.33,-675"/>
+<text text-anchor="middle" x="90.5" y="-659.8" font-family="Arial" font-size="14.00" fill="#cfffac">support.js</text>
+</g>
+<!-- zipEntry.js&#45;&gt;compressedObject.js -->
+<g id="edge53" class="edge">
+<title>zipEntry.js&#45;&gt;compressedObject.js</title>
+<path fill="none" stroke="#757575" d="M481.75,-584.89C494.31,-566.55 520.51,-530.33 548,-504.5 614.23,-442.27 632.98,-425.64 715,-386.5 808.66,-341.8 925.59,-313.23 1000.53,-298.03"/>
+<polygon fill="#757575" stroke="#757575" points="1001.25,-301.45 1010.37,-296.06 999.87,-294.59 1001.25,-301.45"/>
+</g>
+<!-- zipEntry.js&#45;&gt;compressions.js -->
+<g id="edge54" class="edge">
+<title>zipEntry.js&#45;&gt;compressions.js</title>
+<path fill="none" stroke="#757575" d="M512.11,-596.5C612.17,-596.5 883.28,-596.5 1009.58,-596.5"/>
+<polygon fill="#757575" stroke="#757575" points="1009.88,-600 1019.88,-596.5 1009.88,-593 1009.88,-600"/>
+</g>
+<!-- zipEntry.js&#45;&gt;crc32.js -->
+<g id="edge55" class="edge">
+<title>zipEntry.js&#45;&gt;crc32.js</title>
+<path fill="none" stroke="#757575" d="M512.19,-592.38C675.12,-574.79 1311.33,-506.13 1479.85,-487.94"/>
+<polygon fill="#757575" stroke="#757575" points="1480.47,-491.39 1490.04,-486.84 1479.72,-484.44 1480.47,-491.39"/>
+</g>
+<!-- zipEntry.js&#45;&gt;utils.js -->
+<g id="edge58" class="edge">
+<title>zipEntry.js&#45;&gt;utils.js</title>
+<path fill="none" stroke="#757575" d="M512.29,-604.47C523.74,-606.65 536.35,-608.87 548,-610.5 673.53,-628.08 705.25,-636.5 832,-636.5 832,-636.5 832,-636.5 1283.5,-636.5 1364.17,-636.5 1579.19,-662.71 1646,-617.5 1684,-591.78 1699.17,-536.51 1704.86,-505.59"/>
+<polygon fill="#757575" stroke="#757575" points="1708.37,-505.84 1706.55,-495.4 1701.46,-504.69 1708.37,-505.84"/>
+</g>
+<!-- zipEntry.js&#45;&gt;utf8.js -->
+<g id="edge57" class="edge">
+<title>zipEntry.js&#45;&gt;utf8.js</title>
+<path fill="none" stroke="#757575" d="M488.53,-584.79C503.1,-573.32 526.42,-555.68 548,-542.5 618.91,-499.2 636.06,-484.39 715,-458.5 815.21,-425.63 1136.39,-381.56 1245.12,-367.22"/>
+<polygon fill="#757575" stroke="#757575" points="1245.8,-370.66 1255.26,-365.89 1244.89,-363.72 1245.8,-370.66"/>
+</g>
+<!-- zipEntry.js&#45;&gt;reader/readerFor.js -->
+<g id="edge56" class="edge">
+<title>zipEntry.js&#45;&gt;reader/readerFor.js</title>
+<path fill="none" stroke="#757575" d="M487.28,-608.01C511.31,-628.83 562.21,-672.93 591.17,-698.02"/>
+<polygon fill="#757575" stroke="#757575" points="589.21,-700.95 599.06,-704.85 593.79,-695.66 589.21,-700.95"/>
+</g>
+</g>
+</svg>

+ 330 - 0
libs/jszip/index.d.ts

@@ -0,0 +1,330 @@
+// Type definitions for JSZip 3.1
+// Project: http://stuk.github.com/jszip/, https://github.com/stuk/jszip
+// Definitions by: mzeiher <https://github.com/mzeiher>, forabi <https://github.com/forabi>
+// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
+// TypeScript Version: 2.3
+
+/// <reference types="node" />
+
+interface JSZipSupport {
+    arraybuffer: boolean;
+    uint8array: boolean;
+    blob: boolean;
+    nodebuffer: boolean;
+}
+
+type Compression = 'STORE' | 'DEFLATE';
+
+/**
+ * Depends on the compression type. With `STORE` (no compression), these options are ignored. With
+ * `DEFLATE`, you can give the compression level between 1 (best speed) and 9 (best compression).
+ */
+interface CompressionOptions {
+    level: number;
+}
+
+interface InputByType {
+    base64: string;
+    string: string;
+    text: string;
+    binarystring: string;
+    array: number[];
+    uint8array: Uint8Array;
+    arraybuffer: ArrayBuffer;
+    blob: Blob;
+    stream: NodeJS.ReadableStream;
+}
+
+interface OutputByType {
+    base64: string;
+    string: string;
+    text: string;
+    binarystring: string;
+    array: number[];
+    uint8array: Uint8Array;
+    arraybuffer: ArrayBuffer;
+    blob: Blob;
+    nodebuffer: Buffer;
+}
+
+// This private `_data` property on a JSZipObject uses this interface.
+// If/when it is made public this should be uncommented.
+// interface CompressedObject {
+//     compressedSize: number;
+//     uncompressedSize: number;
+//     crc32: number;
+//     compression: object;
+//     compressedContent: string|ArrayBuffer|Uint8Array|Buffer;
+// }
+
+type InputFileFormat = InputByType[keyof InputByType] | Promise<InputByType[keyof InputByType]>;
+
+declare namespace JSZip {
+    type InputType = keyof InputByType;
+
+    type OutputType = keyof OutputByType;
+
+    interface JSZipMetadata {
+        percent: number;
+        currentFile: string | null;
+    }
+
+    type OnUpdateCallback = (metadata: JSZipMetadata) => void;
+
+    interface JSZipObject {
+        name: string;
+        /**
+         * Present for files loaded with `loadAsync`. May contain ".." path components that could
+         * result in a zip-slip attack. See https://snyk.io/research/zip-slip-vulnerability
+         */
+        unsafeOriginalName?: string;
+        dir: boolean;
+        date: Date;
+        comment: string;
+        /** The UNIX permissions of the file, if any. */
+        unixPermissions: number | string | null;
+        /** The DOS permissions of the file, if any. */
+        dosPermissions: number | null;
+        options: JSZipObjectOptions;
+
+        /**
+         * Prepare the content in the asked type.
+         * @param type the type of the result.
+         * @param onUpdate a function to call on each internal update.
+         * @return Promise the promise of the result.
+         */
+        async<T extends OutputType>(type: T, onUpdate?: OnUpdateCallback): Promise<OutputByType[T]>;
+        nodeStream(type?: 'nodebuffer', onUpdate?: OnUpdateCallback): NodeJS.ReadableStream;
+    }
+
+    interface JSZipFileOptions {
+        /** Set to `true` if the data is `base64` encoded. For example image data from a `<canvas>` element. Plain text and HTML do not need this option. */
+        base64?: boolean;
+        /**
+         * Set to `true` if the data should be treated as raw binary content, `false` if it is text. Defaults to
+         * `true` when `base64` is used; if the data is not a `string`, it is also forced to `true`.
+         */
+        binary?: boolean;
+        /**
+         * The last modification date, defaults to the current date.
+         */
+        date?: Date;
+        /**
+         * Sets the per-file compression. The `compressionOptions` parameter depends on the compression type.
+         */
+        compression?: Compression;
+        /**
+         * Sets the per-file compression level for `DEFLATE` compression.
+         */
+        compressionOptions?: null | CompressionOptions;
+        comment?: string;
+        /** Set to `true` if (and only if) the input is a "binary string" and has already been prepared with a `0xFF` mask. */
+        optimizedBinaryString?: boolean;
+        /** Set to `true` if folders in the file path should be automatically created, otherwise there will only be virtual folders that represent the path to the file. */
+        createFolders?: boolean;
+        /** Set to `true` if this is a directory and content should be ignored. */
+        dir?: boolean;
+
+        /** 6-bit number. The DOS permissions of the file, if any. */
+        dosPermissions?: number | null;
+        /**
+         * 16-bit number. The UNIX permissions of the file, if any.
+         * Also accepts a `string` representing the octal value: `"644"`, `"755"`, etc.
+         */
+        unixPermissions?: number | string | null;
+    }
+
+    interface JSZipObjectOptions {
+        compression: Compression;
+    }
+
+    interface JSZipGeneratorOptions<T extends OutputType = OutputType> {
+        /**
+         * Sets the compression option for all entries that have not specified their own `compression` option.
+         */
+        compression?: Compression;
+        /**
+         * Sets compression level for `DEFLATE` compression.
+         */
+        compressionOptions?: null | CompressionOptions;
+        type?: T;
+        comment?: string;
+        /**
+         * mime-type for the generated file.
+         * Useful when you need to generate a file with a different extension, e.g. ".ods".
+         * @default 'application/zip'
+         */
+        mimeType?: string;
+        encodeFileName?(filename: string): string;
+        /** Stream the files and create file descriptors */
+        streamFiles?: boolean;
+        /** DOS (default) or UNIX */
+        platform?: 'DOS' | 'UNIX';
+    }
+
+    interface JSZipLoadOptions {
+        base64?: boolean;
+        checkCRC32?: boolean;
+        optimizedBinaryString?: boolean;
+        createFolders?: boolean;
+        decodeFileName?: (bytes: string[] | Uint8Array | Buffer) => string;
+    }
+
+    type DataEventCallback<T> = (dataChunk: T, metadata: JSZipMetadata) => void;
+    type EndEventCallback = () => void;
+    type ErrorEventCallback = (error: Error) => void;
+
+    interface JSZipStreamHelper<T> {
+        /**
+         * Register a listener on an event
+         */
+        on(event: 'data', callback: DataEventCallback<T>): this;
+        on(event: 'end', callback: EndEventCallback): this;
+        on(event: 'error', callback: ErrorEventCallback): this;
+
+        /**
+         * Read the whole stream and call a callback with the complete content
+         *
+         * @param updateCallback The function called every time the stream updates
+         * @return A Promise of the full content
+         */
+        accumulate(updateCallback?: (metadata: JSZipMetadata) => void): Promise<T>;
+
+        /**
+         * Resume the stream if the stream is paused. Once resumed, the stream starts sending data events again
+         *
+         * @return The current StreamHelper object, for chaining
+         */
+        resume(): this;
+
+        /**
+         * Pause the stream if the stream is running. Once paused, the stream stops sending data events
+         *
+         * @return The current StreamHelper object, for chaining
+         */
+        pause(): this;
+    }
+}
+
+interface JSZip {
+    files: {[key: string]: JSZip.JSZipObject};
+
+    /**
+     * Get a file from the archive
+     *
+     * @param path relative path to file
+     * @return File matching path, null if no file found
+     */
+    file(path: string): JSZip.JSZipObject | null;
+
+    /**
+     * Get files matching a RegExp from archive
+     *
+     * @param path RegExp to match
+     * @return All matching files, or an empty array
+     */
+    file(path: RegExp): JSZip.JSZipObject[];
+
+    /**
+     * Add a file to the archive
+     *
+     * @param path Relative path to file
+     * @param data Content of the file
+     * @param options Optional information about the file
+     * @return JSZip object
+     */
+    file<T extends JSZip.InputType>(path: string, data: InputByType[T] | Promise<InputByType[T]>, options?: JSZip.JSZipFileOptions): this;
+    file<T extends JSZip.InputType>(path: string, data: null, options?: JSZip.JSZipFileOptions & { dir: true }): this;
+
+    /**
+     * Returns a new JSZip instance with the given folder as root
+     *
+     * @param name Name of the folder
+     * @return New JSZip object with the given folder as root or null
+     */
+    folder(name: string): JSZip | null;
+
+    /**
+     * Returns new JSZip instances with the matching folders as root
+     *
+     * @param name RegExp to match
+     * @return New array of JSZipObject entries whose names match the RegExp
+     */
+    folder(name: RegExp): JSZip.JSZipObject[];
+
+    /**
+     * Call a callback function for each entry at this folder level.
+     *
+     * @param callback the function to call for each entry, receiving the relative path and the file
+     */
+    forEach(callback: (relativePath: string, file: JSZip.JSZipObject) => void): void;
+
+    /**
+     * Get all files which match the given filter function
+     *
+     * @param predicate Filter function
+     * @return Array of matched elements
+     */
+    filter(predicate: (relativePath: string, file: JSZip.JSZipObject) => boolean): JSZip.JSZipObject[];
+
+    /**
+     * Removes the file or folder from the archive
+     *
+     * @param path Relative path of file or folder
+     * @return Returns the JSZip instance
+     */
+    remove(path: string): JSZip;
+
+    /**
+     * Generates a new archive asynchronously
+     *
+     * @param options Optional options for the generator
+     * @param onUpdate The optional function called on each internal update with the metadata.
+     * @return A Promise of the serialized archive
+     */
+    generateAsync<T extends JSZip.OutputType>(options?: JSZip.JSZipGeneratorOptions<T>, onUpdate?: JSZip.OnUpdateCallback): Promise<OutputByType[T]>;
+
+    /**
+     * Generates a new archive asynchronously
+     *
+     * @param options Optional options for the generator
+     * @param onUpdate The optional function called on each internal update with the metadata.
+     * @return A Node.js `ReadableStream`
+     */
+    generateNodeStream(options?: JSZip.JSZipGeneratorOptions<'nodebuffer'>, onUpdate?: JSZip.OnUpdateCallback): NodeJS.ReadableStream;
+
+    /**
+     * Generates the complete zip file with the internal stream implementation
+     *
+     * @param options Optional options for the generator
+     * @return a StreamHelper
+     */
+    generateInternalStream<T extends JSZip.OutputType>(options?: JSZip.JSZipGeneratorOptions<T>): JSZip.JSZipStreamHelper<OutputByType[T]>;
+
+    /**
+     * Deserialize zip file asynchronously
+     *
+     * @param data Serialized zip file
+     * @param options Options for deserializing
+     * @return A Promise resolving to the JSZip instance
+     */
+    loadAsync(data: InputFileFormat, options?: JSZip.JSZipLoadOptions): Promise<JSZip>;
+
+    /**
+     * Create JSZip instance
+     */
+    new(): this;
+
+    (): JSZip;
+
+    prototype: JSZip;
+    support: JSZipSupport;
+    external: {
+        Promise: PromiseConstructorLike;
+    };
+    version: string;
+}
+
+declare var JSZip: JSZip;
+
+export = JSZip;
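
Read together, these typings cover the whole public API. Below is a minimal usage sketch against them (plain JavaScript, assuming the package resolves as "jszip"; the file names and contents are made up):

var JSZip = require("jszip");

var zip = new JSZip();
// add a text entry and a folder (names are illustrative)
zip.file("hello.txt", "Hello World\n");
zip.folder("images");

zip.generateAsync({ type: "uint8array", compression: "DEFLATE" }, function (metadata) {
    // matches OnUpdateCallback above: percent + currentFile
    console.log(metadata.percent.toFixed(2) + "% - " + metadata.currentFile);
}).then(function (content) {
    // `content` is a Uint8Array because type was "uint8array" (see OutputByType)
    console.log("generated " + content.length + " bytes");
});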

+ 106 - 0
libs/jszip/lib/base64.js

@@ -0,0 +1,106 @@
+"use strict";
+var utils = require("./utils");
+var support = require("./support");
+// private property
+var _keyStr = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";
+
+
+// public method for encoding
+exports.encode = function(input) {
+    var output = [];
+    var chr1, chr2, chr3, enc1, enc2, enc3, enc4;
+    var i = 0, len = input.length, remainingBytes = len;
+
+    var isArray = utils.getTypeOf(input) !== "string";
+    while (i < input.length) {
+        remainingBytes = len - i;
+
+        if (!isArray) {
+            chr1 = input.charCodeAt(i++);
+            chr2 = i < len ? input.charCodeAt(i++) : 0;
+            chr3 = i < len ? input.charCodeAt(i++) : 0;
+        } else {
+            chr1 = input[i++];
+            chr2 = i < len ? input[i++] : 0;
+            chr3 = i < len ? input[i++] : 0;
+        }
+
+        enc1 = chr1 >> 2;
+        enc2 = ((chr1 & 3) << 4) | (chr2 >> 4);
+        enc3 = remainingBytes > 1 ? (((chr2 & 15) << 2) | (chr3 >> 6)) : 64;
+        enc4 = remainingBytes > 2 ? (chr3 & 63) : 64;
+
+        output.push(_keyStr.charAt(enc1) + _keyStr.charAt(enc2) + _keyStr.charAt(enc3) + _keyStr.charAt(enc4));
+
+    }
+
+    return output.join("");
+};
+
+// public method for decoding
+exports.decode = function(input) {
+    var chr1, chr2, chr3;
+    var enc1, enc2, enc3, enc4;
+    var i = 0, resultIndex = 0;
+
+    var dataUrlPrefix = "data:";
+
+    if (input.substr(0, dataUrlPrefix.length) === dataUrlPrefix) {
+        // This is a common error: people give a data url
+        // (data:image/png;base64,iVBOR...) with {base64: true} and
+        // wonder why things don't work.
+        // We can detect that the string input looks like a data url but we
+        // *can't* be sure it is one: removing everything up to the comma would
+        // be too dangerous.
+        throw new Error("Invalid base64 input, it looks like a data url.");
+    }
+
+    input = input.replace(/[^A-Za-z0-9+/=]/g, "");
+
+    var totalLength = input.length * 3 / 4;
+    if(input.charAt(input.length - 1) === _keyStr.charAt(64)) {
+        totalLength--;
+    }
+    if(input.charAt(input.length - 2) === _keyStr.charAt(64)) {
+        totalLength--;
+    }
+    if (totalLength % 1 !== 0) {
+        // totalLength is not an integer, the length does not match a valid
+        // base64 content. That can happen if:
+        // - the input is not a base64 content
+        // - the input is *almost* a base64 content, with extra chars at the
+        //   beginning or at the end
+        // - the input uses a base64 variant (base64url for example)
+        throw new Error("Invalid base64 input, bad content length.");
+    }
+    var output;
+    if (support.uint8array) {
+        output = new Uint8Array(totalLength|0);
+    } else {
+        output = new Array(totalLength|0);
+    }
+
+    while (i < input.length) {
+
+        enc1 = _keyStr.indexOf(input.charAt(i++));
+        enc2 = _keyStr.indexOf(input.charAt(i++));
+        enc3 = _keyStr.indexOf(input.charAt(i++));
+        enc4 = _keyStr.indexOf(input.charAt(i++));
+
+        chr1 = (enc1 << 2) | (enc2 >> 4);
+        chr2 = ((enc2 & 15) << 4) | (enc3 >> 2);
+        chr3 = ((enc3 & 3) << 6) | enc4;
+
+        output[resultIndex++] = chr1;
+
+        if (enc3 !== 64) {
+            output[resultIndex++] = chr2;
+        }
+        if (enc4 !== 64) {
+            output[resultIndex++] = chr3;
+        }
+
+    }
+
+    return output;
+};
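
A quick round-trip through this module (the require path is illustrative; inside the library it is simply require("./base64")):

var base64 = require("./libs/jszip/lib/base64");

var encoded = base64.encode("JSZip");   // "SlNaaXA="
var decoded = base64.decode(encoded);   // Uint8Array (or plain Array) of byte values
console.log(String.fromCharCode.apply(null, decoded));   // "JSZip"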

+ 74 - 0
libs/jszip/lib/compressedObject.js

@@ -0,0 +1,74 @@
+"use strict";
+
+var external = require("./external");
+var DataWorker = require("./stream/DataWorker");
+var Crc32Probe = require("./stream/Crc32Probe");
+var DataLengthProbe = require("./stream/DataLengthProbe");
+
+/**
+ * Represent a compressed object, with everything needed to decompress it.
+ * @constructor
+ * @param {number} compressedSize the size of the data compressed.
+ * @param {number} uncompressedSize the size of the data after decompression.
+ * @param {number} crc32 the crc32 of the decompressed file.
+ * @param {object} compression the type of compression, see lib/compressions.js.
+ * @param {String|ArrayBuffer|Uint8Array|Buffer} data the compressed data.
+ */
+function CompressedObject(compressedSize, uncompressedSize, crc32, compression, data) {
+    this.compressedSize = compressedSize;
+    this.uncompressedSize = uncompressedSize;
+    this.crc32 = crc32;
+    this.compression = compression;
+    this.compressedContent = data;
+}
+
+CompressedObject.prototype = {
+    /**
+     * Create a worker to get the uncompressed content.
+     * @return {GenericWorker} the worker.
+     */
+    getContentWorker: function () {
+        var worker = new DataWorker(external.Promise.resolve(this.compressedContent))
+            .pipe(this.compression.uncompressWorker())
+            .pipe(new DataLengthProbe("data_length"));
+
+        var that = this;
+        worker.on("end", function () {
+            if (this.streamInfo["data_length"] !== that.uncompressedSize) {
+                throw new Error("Bug : uncompressed data size mismatch");
+            }
+        });
+        return worker;
+    },
+    /**
+     * Create a worker to get the compressed content.
+     * @return {GenericWorker} the worker.
+     */
+    getCompressedWorker: function () {
+        return new DataWorker(external.Promise.resolve(this.compressedContent))
+            .withStreamInfo("compressedSize", this.compressedSize)
+            .withStreamInfo("uncompressedSize", this.uncompressedSize)
+            .withStreamInfo("crc32", this.crc32)
+            .withStreamInfo("compression", this.compression)
+        ;
+    }
+};
+
+/**
+ * Chain the given worker with other workers to compress the content with the
+ * given compression.
+ * @param {GenericWorker} uncompressedWorker the worker to pipe.
+ * @param {Object} compression the compression object.
+ * @param {Object} compressionOptions the options to use when compressing.
+ * @return {GenericWorker} the new worker compressing the content.
+ */
+CompressedObject.createWorkerFrom = function (uncompressedWorker, compression, compressionOptions) {
+    return uncompressedWorker
+        .pipe(new Crc32Probe())
+        .pipe(new DataLengthProbe("uncompressedSize"))
+        .pipe(compression.compressWorker(compressionOptions))
+        .pipe(new DataLengthProbe("compressedSize"))
+        .withStreamInfo("compression", compression);
+};
+
+module.exports = CompressedObject;

+ 14 - 0
libs/jszip/lib/compressions.js

@@ -0,0 +1,14 @@
+"use strict";
+
+var GenericWorker = require("./stream/GenericWorker");
+
+exports.STORE = {
+    magic: "\x00\x00",
+    compressWorker : function () {
+        return new GenericWorker("STORE compression");
+    },
+    uncompressWorker : function () {
+        return new GenericWorker("STORE decompression");
+    }
+};
+exports.DEFLATE = require("./flate");
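
These two objects back the public `compression` / `compressionOptions` options. A sketch of how they are typically selected per file or for the whole archive (`pngBytes` is a hypothetical Uint8Array of already-compressed data):

var JSZip = require("jszip");
var zip = new JSZip();

zip.file("photo.png", pngBytes, { compression: "STORE" });   // keep as-is, it is already compressed
zip.file("readme.txt", "lots of text...", {
    compression: "DEFLATE",
    compressionOptions: { level: 9 }   // 1 = best speed, 9 = best compression
});

// default for every entry that did not set its own option
zip.generateAsync({ type: "nodebuffer", compression: "DEFLATE" })
    .then(function (buffer) { /* write or send the archive */ });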

+ 77 - 0
libs/jszip/lib/crc32.js

@@ -0,0 +1,77 @@
+"use strict";
+
+var utils = require("./utils");
+
+/**
+ * The following functions come from pako, from pako/lib/zlib/crc32.js
+ * released under the MIT license, see pako https://github.com/nodeca/pako/
+ */
+
+// Use ordinary array, since untyped makes no boost here
+function makeTable() {
+    var c, table = [];
+
+    for(var n =0; n < 256; n++){
+        c = n;
+        for(var k =0; k < 8; k++){
+            c = ((c&1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1));
+        }
+        table[n] = c;
+    }
+
+    return table;
+}
+
+// Create table on load. Just 256 signed longs. Not a problem.
+var crcTable = makeTable();
+
+
+function crc32(crc, buf, len, pos) {
+    var t = crcTable, end = pos + len;
+
+    crc = crc ^ (-1);
+
+    for (var i = pos; i < end; i++ ) {
+        crc = (crc >>> 8) ^ t[(crc ^ buf[i]) & 0xFF];
+    }
+
+    return (crc ^ (-1)); // >>> 0;
+}
+
+// That's all for the pako functions.
+
+/**
+ * Compute the crc32 of a string.
+ * This is almost the same as the function crc32, but for strings. Using the
+ * same function for the two use cases leads to terrible performance.
+ * @param {Number} crc the starting value of the crc.
+ * @param {String} str the string to use.
+ * @param {Number} len the length of the string.
+ * @param {Number} pos the starting position for the crc32 computation.
+ * @return {Number} the computed crc32.
+ */
+function crc32str(crc, str, len, pos) {
+    var t = crcTable, end = pos + len;
+
+    crc = crc ^ (-1);
+
+    for (var i = pos; i < end; i++ ) {
+        crc = (crc >>> 8) ^ t[(crc ^ str.charCodeAt(i)) & 0xFF];
+    }
+
+    return (crc ^ (-1)); // >>> 0;
+}
+
+module.exports = function crc32wrapper(input, crc) {
+    if (typeof input === "undefined" || !input.length) {
+        return 0;
+    }
+
+    var isArray = utils.getTypeOf(input) !== "string";
+
+    if(isArray) {
+        return crc32(crc|0, input, input.length, 0);
+    } else {
+        return crc32str(crc|0, input, input.length, 0);
+    }
+};
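
The exported wrapper can also be fed a running checksum, which is how the streaming probes use it. A small sketch (the require path is illustrative):

var crc32 = require("./libs/jszip/lib/crc32");

var onePass = crc32("hello world");
// pass the previous result as the second argument to checksum chunk by chunk
var chunked = crc32(" world", crc32("hello"));
console.log(onePass === chunked);             // true
console.log((onePass >>> 0).toString(16));    // "d4a1185" - the usual unsigned hex form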

+ 11 - 0
libs/jszip/lib/defaults.js

@@ -0,0 +1,11 @@
+"use strict";
+exports.base64 = false;
+exports.binary = false;
+exports.dir = false;
+exports.createFolders = true;
+exports.date = null;
+exports.compression = null;
+exports.compressionOptions = null;
+exports.comment = null;
+exports.unixPermissions = null;
+exports.dosPermissions = null;

+ 18 - 0
libs/jszip/lib/external.js

@@ -0,0 +1,18 @@
+"use strict";
+
+// load the global object first:
+// - it should be better integrated in the system (unhandledRejection in node)
+// - the environment may have a custom Promise implementation (see zone.js)
+var ES6Promise = null;
+if (typeof Promise !== "undefined") {
+    ES6Promise = Promise;
+} else {
+    ES6Promise = require("lie");
+}
+
+/**
+ * Let the user use/change some implementations.
+ */
+module.exports = {
+    Promise: ES6Promise
+};

+ 85 - 0
libs/jszip/lib/flate.js

@@ -0,0 +1,85 @@
+"use strict";
+var USE_TYPEDARRAY = (typeof Uint8Array !== "undefined") && (typeof Uint16Array !== "undefined") && (typeof Uint32Array !== "undefined");
+
+var pako = require("pako");
+var utils = require("./utils");
+var GenericWorker = require("./stream/GenericWorker");
+
+var ARRAY_TYPE = USE_TYPEDARRAY ? "uint8array" : "array";
+
+exports.magic = "\x08\x00";
+
+/**
+ * Create a worker that uses pako to inflate/deflate.
+ * @constructor
+ * @param {String} action the name of the pako function to call : either "Deflate" or "Inflate".
+ * @param {Object} options the options to use when (de)compressing.
+ */
+function FlateWorker(action, options) {
+    GenericWorker.call(this, "FlateWorker/" + action);
+
+    this._pako = null;
+    this._pakoAction = action;
+    this._pakoOptions = options;
+    // the `meta` object from the last chunk received
+    // this allows this worker to pass metadata around
+    this.meta = {};
+}
+
+utils.inherits(FlateWorker, GenericWorker);
+
+/**
+ * @see GenericWorker.processChunk
+ */
+FlateWorker.prototype.processChunk = function (chunk) {
+    this.meta = chunk.meta;
+    if (this._pako === null) {
+        this._createPako();
+    }
+    this._pako.push(utils.transformTo(ARRAY_TYPE, chunk.data), false);
+};
+
+/**
+ * @see GenericWorker.flush
+ */
+FlateWorker.prototype.flush = function () {
+    GenericWorker.prototype.flush.call(this);
+    if (this._pako === null) {
+        this._createPako();
+    }
+    this._pako.push([], true);
+};
+/**
+ * @see GenericWorker.cleanUp
+ */
+FlateWorker.prototype.cleanUp = function () {
+    GenericWorker.prototype.cleanUp.call(this);
+    this._pako = null;
+};
+
+/**
+ * Create the _pako object.
+ * TODO: lazy-loading this object isn't the best solution but it's the
+ * quickest. The best solution is to lazy-load the worker list. See also the
+ * issue #446.
+ */
+FlateWorker.prototype._createPako = function () {
+    this._pako = new pako[this._pakoAction]({
+        raw: true,
+        level: this._pakoOptions.level || -1 // default compression
+    });
+    var self = this;
+    this._pako.onData = function(data) {
+        self.push({
+            data : data,
+            meta : self.meta
+        });
+    };
+};
+
+exports.compressWorker = function (compressionOptions) {
+    return new FlateWorker("Deflate", compressionOptions);
+};
+exports.uncompressWorker = function () {
+    return new FlateWorker("Inflate", {});
+};

+ 539 - 0
libs/jszip/lib/generate/ZipFileWorker.js

@@ -0,0 +1,539 @@
+"use strict";
+
+var utils = require("../utils");
+var GenericWorker = require("../stream/GenericWorker");
+var utf8 = require("../utf8");
+var crc32 = require("../crc32");
+var signature = require("../signature");
+
+/**
+ * Transform an integer into a string in hexadecimal.
+ * @private
+ * @param {number} dec the number to convert.
+ * @param {number} bytes the number of bytes to generate.
+ * @returns {string} the result.
+ */
+var decToHex = function(dec, bytes) {
+    var hex = "", i;
+    for (i = 0; i < bytes; i++) {
+        hex += String.fromCharCode(dec & 0xff);
+        dec = dec >>> 8;
+    }
+    return hex;
+};
+
+/**
+ * Generate the UNIX part of the external file attributes.
+ * @param {Object} unixPermissions the unix permissions or null.
+ * @param {Boolean} isDir true if the entry is a directory, false otherwise.
+ * @return {Number} a 32 bit integer.
+ *
+ * adapted from http://unix.stackexchange.com/questions/14705/the-zip-formats-external-file-attribute :
+ *
+ * TTTTsstrwxrwxrwx0000000000ADVSHR
+ * ^^^^____________________________ file type, see zipinfo.c (UNX_*)
+ *     ^^^_________________________ setuid, setgid, sticky
+ *        ^^^^^^^^^________________ permissions
+ *                 ^^^^^^^^^^______ not used ?
+ *                           ^^^^^^ DOS attribute bits : Archive, Directory, Volume label, System file, Hidden, Read only
+ */
+var generateUnixExternalFileAttr = function (unixPermissions, isDir) {
+
+    var result = unixPermissions;
+    if (!unixPermissions) {
+        // I can't use octal values in strict mode, hence the hex.
+        //  040775 => 0x41fd
+        // 0100664 => 0x81b4
+        result = isDir ? 0x41fd : 0x81b4;
+    }
+    return (result & 0xFFFF) << 16;
+};
+
+/**
+ * Generate the DOS part of the external file attributes.
+ * @param {Object} dosPermissions the dos permissions or null.
+ * @param {Boolean} isDir true if the entry is a directory, false otherwise.
+ * @return {Number} a 32 bit integer.
+ *
+ * Bit 0     Read-Only
+ * Bit 1     Hidden
+ * Bit 2     System
+ * Bit 3     Volume Label
+ * Bit 4     Directory
+ * Bit 5     Archive
+ */
+var generateDosExternalFileAttr = function (dosPermissions) {
+    // the dir flag is already set for compatibility
+    return (dosPermissions || 0)  & 0x3F;
+};
+
+/**
+ * Generate the various parts used in the construction of the final zip file.
+ * @param {Object} streamInfo the hash with information about the compressed file.
+ * @param {Boolean} streamedContent is the content streamed?
+ * @param {Boolean} streamingEnded is the stream finished?
+ * @param {number} offset the current offset from the start of the zip file.
+ * @param {String} platform the platform to pretend to be (changes the platform-dependent fields)
+ * @param {Function} encodeFileName the function to encode the file name / comment.
+ * @return {Object} the zip parts.
+ */
+var generateZipParts = function(streamInfo, streamedContent, streamingEnded, offset, platform, encodeFileName) {
+    var file = streamInfo["file"],
+        compression = streamInfo["compression"],
+        useCustomEncoding = encodeFileName !== utf8.utf8encode,
+        encodedFileName = utils.transformTo("string", encodeFileName(file.name)),
+        utfEncodedFileName = utils.transformTo("string", utf8.utf8encode(file.name)),
+        comment = file.comment,
+        encodedComment = utils.transformTo("string", encodeFileName(comment)),
+        utfEncodedComment = utils.transformTo("string", utf8.utf8encode(comment)),
+        useUTF8ForFileName = utfEncodedFileName.length !== file.name.length,
+        useUTF8ForComment = utfEncodedComment.length !== comment.length,
+        dosTime,
+        dosDate,
+        extraFields = "",
+        unicodePathExtraField = "",
+        unicodeCommentExtraField = "",
+        dir = file.dir,
+        date = file.date;
+
+
+    var dataInfo = {
+        crc32 : 0,
+        compressedSize : 0,
+        uncompressedSize : 0
+    };
+
+    // if the content is streamed, the sizes/crc32 are only available AFTER
+    // the end of the stream.
+    if (!streamedContent || streamingEnded) {
+        dataInfo.crc32 = streamInfo["crc32"];
+        dataInfo.compressedSize = streamInfo["compressedSize"];
+        dataInfo.uncompressedSize = streamInfo["uncompressedSize"];
+    }
+
+    var bitflag = 0;
+    if (streamedContent) {
+        // Bit 3: the sizes/crc32 are set to zero in the local header.
+        // The correct values are put in the data descriptor immediately
+        // following the compressed data.
+        bitflag |= 0x0008;
+    }
+    if (!useCustomEncoding && (useUTF8ForFileName || useUTF8ForComment)) {
+        // Bit 11: Language encoding flag (EFS).
+        bitflag |= 0x0800;
+    }
+
+
+    var extFileAttr = 0;
+    var versionMadeBy = 0;
+    if (dir) {
+        // dos or unix, we set the dos dir flag
+        extFileAttr |= 0x00010;
+    }
+    if(platform === "UNIX") {
+        versionMadeBy = 0x031E; // UNIX, version 3.0
+        extFileAttr |= generateUnixExternalFileAttr(file.unixPermissions, dir);
+    } else { // DOS or other, fallback to DOS
+        versionMadeBy = 0x0014; // DOS, version 2.0
+        extFileAttr |= generateDosExternalFileAttr(file.dosPermissions, dir);
+    }
+
+    // date
+    // @see http://www.delorie.com/djgpp/doc/rbinter/it/52/13.html
+    // @see http://www.delorie.com/djgpp/doc/rbinter/it/65/16.html
+    // @see http://www.delorie.com/djgpp/doc/rbinter/it/66/16.html
+
+    dosTime = date.getUTCHours();
+    dosTime = dosTime << 6;
+    dosTime = dosTime | date.getUTCMinutes();
+    dosTime = dosTime << 5;
+    dosTime = dosTime | date.getUTCSeconds() / 2;
+
+    dosDate = date.getUTCFullYear() - 1980;
+    dosDate = dosDate << 4;
+    dosDate = dosDate | (date.getUTCMonth() + 1);
+    dosDate = dosDate << 5;
+    dosDate = dosDate | date.getUTCDate();
+
+    if (useUTF8ForFileName) {
+        // set the unicode path extra field. unzip needs at least one extra
+        // field to correctly handle unicode path, so using the path is as good
+        // as any other information. This could improve the situation with
+        // other archive managers too.
+        // This field is usually used without the utf8 flag, with a non
+        // unicode path in the header (winrar, winzip). This helps (a bit)
+        // with the messy Windows' default compressed folders feature but
+        // breaks on p7zip which doesn't seek the unicode path extra field.
+        // So for now, UTF-8 everywhere !
+        unicodePathExtraField =
+            // Version
+            decToHex(1, 1) +
+            // NameCRC32
+            decToHex(crc32(encodedFileName), 4) +
+            // UnicodeName
+            utfEncodedFileName;
+
+        extraFields +=
+            // Info-ZIP Unicode Path Extra Field
+            "\x75\x70" +
+            // size
+            decToHex(unicodePathExtraField.length, 2) +
+            // content
+            unicodePathExtraField;
+    }
+
+    if(useUTF8ForComment) {
+
+        unicodeCommentExtraField =
+            // Version
+            decToHex(1, 1) +
+            // CommentCRC32
+            decToHex(crc32(encodedComment), 4) +
+            // UnicodeComment
+            utfEncodedComment;
+
+        extraFields +=
+            // Info-ZIP Unicode Comment Extra Field
+            "\x75\x63" +
+            // size
+            decToHex(unicodeCommentExtraField.length, 2) +
+            // content
+            unicodeCommentExtraField;
+    }
+
+    var header = "";
+
+    // version needed to extract
+    header += "\x0A\x00";
+    // general purpose bit flag
+    header += decToHex(bitflag, 2);
+    // compression method
+    header += compression.magic;
+    // last mod file time
+    header += decToHex(dosTime, 2);
+    // last mod file date
+    header += decToHex(dosDate, 2);
+    // crc-32
+    header += decToHex(dataInfo.crc32, 4);
+    // compressed size
+    header += decToHex(dataInfo.compressedSize, 4);
+    // uncompressed size
+    header += decToHex(dataInfo.uncompressedSize, 4);
+    // file name length
+    header += decToHex(encodedFileName.length, 2);
+    // extra field length
+    header += decToHex(extraFields.length, 2);
+
+
+    var fileRecord = signature.LOCAL_FILE_HEADER + header + encodedFileName + extraFields;
+
+    var dirRecord = signature.CENTRAL_FILE_HEADER +
+        // version made by (00: DOS)
+        decToHex(versionMadeBy, 2) +
+        // file header (common to file and central directory)
+        header +
+        // file comment length
+        decToHex(encodedComment.length, 2) +
+        // disk number start
+        "\x00\x00" +
+        // internal file attributes TODO
+        "\x00\x00" +
+        // external file attributes
+        decToHex(extFileAttr, 4) +
+        // relative offset of local header
+        decToHex(offset, 4) +
+        // file name
+        encodedFileName +
+        // extra field
+        extraFields +
+        // file comment
+        encodedComment;
+
+    return {
+        fileRecord: fileRecord,
+        dirRecord: dirRecord
+    };
+};
+
+/**
+ * Generate the EOCD record.
+ * @param {Number} entriesCount the number of entries in the zip file.
+ * @param {Number} centralDirLength the length (in bytes) of the central dir.
+ * @param {Number} localDirLength the length (in bytes) of the local dir.
+ * @param {String} comment the zip file comment as a binary string.
+ * @param {Function} encodeFileName the function to encode the comment.
+ * @return {String} the EOCD record.
+ */
+var generateCentralDirectoryEnd = function (entriesCount, centralDirLength, localDirLength, comment, encodeFileName) {
+    var dirEnd = "";
+    var encodedComment = utils.transformTo("string", encodeFileName(comment));
+
+    // end of central dir signature
+    dirEnd = signature.CENTRAL_DIRECTORY_END +
+        // number of this disk
+        "\x00\x00" +
+        // number of the disk with the start of the central directory
+        "\x00\x00" +
+        // total number of entries in the central directory on this disk
+        decToHex(entriesCount, 2) +
+        // total number of entries in the central directory
+        decToHex(entriesCount, 2) +
+        // size of the central directory   4 bytes
+        decToHex(centralDirLength, 4) +
+        // offset of start of central directory with respect to the starting disk number
+        decToHex(localDirLength, 4) +
+        // .ZIP file comment length
+        decToHex(encodedComment.length, 2) +
+        // .ZIP file comment
+        encodedComment;
+
+    return dirEnd;
+};
+
+/**
+ * Generate data descriptors for a file entry.
+ * @param {Object} streamInfo the hash generated by a worker, containing information
+ * on the file entry.
+ * @return {String} the data descriptors.
+ */
+var generateDataDescriptors = function (streamInfo) {
+    var descriptor = "";
+    descriptor = signature.DATA_DESCRIPTOR +
+        // crc-32                          4 bytes
+        decToHex(streamInfo["crc32"], 4) +
+        // compressed size                 4 bytes
+        decToHex(streamInfo["compressedSize"], 4) +
+        // uncompressed size               4 bytes
+        decToHex(streamInfo["uncompressedSize"], 4);
+
+    return descriptor;
+};
+
+
+/**
+ * A worker to concatenate other workers to create a zip file.
+ * @param {Boolean} streamFiles `true` to stream the content of the files,
+ * `false` to accumulate it.
+ * @param {String} comment the comment to use.
+ * @param {String} platform the platform to use, "UNIX" or "DOS".
+ * @param {Function} encodeFileName the function to encode file names and comments.
+ */
+function ZipFileWorker(streamFiles, comment, platform, encodeFileName) {
+    GenericWorker.call(this, "ZipFileWorker");
+    // The number of bytes written so far. This doesn't count accumulated chunks.
+    this.bytesWritten = 0;
+    // The comment of the zip file
+    this.zipComment = comment;
+    // The platform "generating" the zip file.
+    this.zipPlatform = platform;
+    // the function to encode file names and comments.
+    this.encodeFileName = encodeFileName;
+    // Should we stream the content of the files ?
+    this.streamFiles = streamFiles;
+    // If `streamFiles` is false, we will need to accumulate the content of the
+    // files to calculate sizes / crc32 (and write them *before* the content).
+    // This boolean indicates if we are accumulating chunks (it will change a lot
+    // during the lifetime of this worker).
+    this.accumulate = false;
+    // The buffer receiving chunks when accumulating content.
+    this.contentBuffer = [];
+    // The list of generated directory records.
+    this.dirRecords = [];
+    // The offset (in bytes) from the beginning of the zip file for the current source.
+    this.currentSourceOffset = 0;
+    // The total number of entries in this zip file.
+    this.entriesCount = 0;
+    // the name of the file currently being added, null when handling the end of the zip file.
+    // Used for the emitted metadata.
+    this.currentFile = null;
+
+
+
+    this._sources = [];
+}
+utils.inherits(ZipFileWorker, GenericWorker);
+
+/**
+ * @see GenericWorker.push
+ */
+ZipFileWorker.prototype.push = function (chunk) {
+
+    var currentFilePercent = chunk.meta.percent || 0;
+    var entriesCount = this.entriesCount;
+    var remainingFiles = this._sources.length;
+
+    if(this.accumulate) {
+        this.contentBuffer.push(chunk);
+    } else {
+        this.bytesWritten += chunk.data.length;
+
+        GenericWorker.prototype.push.call(this, {
+            data : chunk.data,
+            meta : {
+                currentFile : this.currentFile,
+                percent : entriesCount ? (currentFilePercent + 100 * (entriesCount - remainingFiles - 1)) / entriesCount : 100
+            }
+        });
+    }
+};
+
+/**
+ * The worker started a new source (another worker).
+ * @param {Object} streamInfo the streamInfo object from the new source.
+ */
+ZipFileWorker.prototype.openedSource = function (streamInfo) {
+    this.currentSourceOffset = this.bytesWritten;
+    this.currentFile = streamInfo["file"].name;
+
+    var streamedContent = this.streamFiles && !streamInfo["file"].dir;
+
+    // don't stream folders (because they don't have any content)
+    if(streamedContent) {
+        var record = generateZipParts(streamInfo, streamedContent, false, this.currentSourceOffset, this.zipPlatform, this.encodeFileName);
+        this.push({
+            data : record.fileRecord,
+            meta : {percent:0}
+        });
+    } else {
+        // we need to wait for the whole file before pushing anything
+        this.accumulate = true;
+    }
+};
+
+/**
+ * The worker finished a source (another worker).
+ * @param {Object} streamInfo the streamInfo object from the finished source.
+ */
+ZipFileWorker.prototype.closedSource = function (streamInfo) {
+    this.accumulate = false;
+    var streamedContent = this.streamFiles && !streamInfo["file"].dir;
+    var record = generateZipParts(streamInfo, streamedContent, true, this.currentSourceOffset, this.zipPlatform, this.encodeFileName);
+
+    this.dirRecords.push(record.dirRecord);
+    if(streamedContent) {
+        // after the streamed file, we put data descriptors
+        this.push({
+            data : generateDataDescriptors(streamInfo),
+            meta : {percent:100}
+        });
+    } else {
+        // the content wasn't streamed, we need to push everything now
+        // first the file record, then the content
+        this.push({
+            data : record.fileRecord,
+            meta : {percent:0}
+        });
+        while(this.contentBuffer.length) {
+            this.push(this.contentBuffer.shift());
+        }
+    }
+    this.currentFile = null;
+};
+
+/**
+ * @see GenericWorker.flush
+ */
+ZipFileWorker.prototype.flush = function () {
+
+    var localDirLength = this.bytesWritten;
+    for(var i = 0; i < this.dirRecords.length; i++) {
+        this.push({
+            data : this.dirRecords[i],
+            meta : {percent:100}
+        });
+    }
+    var centralDirLength = this.bytesWritten - localDirLength;
+
+    var dirEnd = generateCentralDirectoryEnd(this.dirRecords.length, centralDirLength, localDirLength, this.zipComment, this.encodeFileName);
+
+    this.push({
+        data : dirEnd,
+        meta : {percent:100}
+    });
+};
+
+/**
+ * Prepare the next source to be read.
+ */
+ZipFileWorker.prototype.prepareNextSource = function () {
+    this.previous = this._sources.shift();
+    this.openedSource(this.previous.streamInfo);
+    if (this.isPaused) {
+        this.previous.pause();
+    } else {
+        this.previous.resume();
+    }
+};
+
+/**
+ * @see GenericWorker.registerPrevious
+ */
+ZipFileWorker.prototype.registerPrevious = function (previous) {
+    this._sources.push(previous);
+    var self = this;
+
+    previous.on("data", function (chunk) {
+        self.processChunk(chunk);
+    });
+    previous.on("end", function () {
+        self.closedSource(self.previous.streamInfo);
+        if(self._sources.length) {
+            self.prepareNextSource();
+        } else {
+            self.end();
+        }
+    });
+    previous.on("error", function (e) {
+        self.error(e);
+    });
+    return this;
+};
+
+/**
+ * @see GenericWorker.resume
+ */
+ZipFileWorker.prototype.resume = function () {
+    if(!GenericWorker.prototype.resume.call(this)) {
+        return false;
+    }
+
+    if (!this.previous && this._sources.length) {
+        this.prepareNextSource();
+        return true;
+    }
+    if (!this.previous && !this._sources.length && !this.generatedError) {
+        this.end();
+        return true;
+    }
+};
+
+/**
+ * @see GenericWorker.error
+ */
+ZipFileWorker.prototype.error = function (e) {
+    var sources = this._sources;
+    if(!GenericWorker.prototype.error.call(this, e)) {
+        return false;
+    }
+    for(var i = 0; i < sources.length; i++) {
+        try {
+            sources[i].error(e);
+        } catch(e) {
+            // the `error` exploded, nothing to do
+        }
+    }
+    return true;
+};
+
+/**
+ * @see GenericWorker.lock
+ */
+ZipFileWorker.prototype.lock = function () {
+    GenericWorker.prototype.lock.call(this);
+    var sources = this._sources;
+    for(var i = 0; i < sources.length; i++) {
+        sources[i].lock();
+    }
+};
+
+module.exports = ZipFileWorker;

+ 57 - 0
libs/jszip/lib/generate/index.js

@@ -0,0 +1,57 @@
+"use strict";
+
+var compressions = require("../compressions");
+var ZipFileWorker = require("./ZipFileWorker");
+
+/**
+ * Find the compression to use.
+ * @param {String} fileCompression the compression defined at the file level, if any.
+ * @param {String} zipCompression the compression defined at the load() level.
+ * @return {Object} the compression object to use.
+ */
+var getCompression = function (fileCompression, zipCompression) {
+
+    var compressionName = fileCompression || zipCompression;
+    var compression = compressions[compressionName];
+    if (!compression) {
+        throw new Error(compressionName + " is not a valid compression method !");
+    }
+    return compression;
+};
+
+/**
+ * Create a worker to generate a zip file.
+ * @param {JSZip} zip the JSZip instance at the right root level.
+ * @param {Object} options to generate the zip file.
+ * @param {String} comment the comment to use.
+ */
+exports.generateWorker = function (zip, options, comment) {
+
+    var zipFileWorker = new ZipFileWorker(options.streamFiles, comment, options.platform, options.encodeFileName);
+    var entriesCount = 0;
+    try {
+
+        zip.forEach(function (relativePath, file) {
+            entriesCount++;
+            var compression = getCompression(file.options.compression, options.compression);
+            var compressionOptions = file.options.compressionOptions || options.compressionOptions || {};
+            var dir = file.dir, date = file.date;
+
+            file._compressWorker(compression, compressionOptions)
+                .withStreamInfo("file", {
+                    name : relativePath,
+                    dir : dir,
+                    date : date,
+                    comment : file.comment || "",
+                    unixPermissions : file.unixPermissions,
+                    dosPermissions : file.dosPermissions
+                })
+                .pipe(zipFileWorker);
+        });
+        zipFileWorker.entriesCount = entriesCount;
+    } catch (e) {
+        zipFileWorker.error(e);
+    }
+
+    return zipFileWorker;
+};

+ 55 - 0
libs/jszip/lib/index.js

@@ -0,0 +1,55 @@
+"use strict";
+
+/**
+ * Representation of a zip file in js
+ * @constructor
+ */
+function JSZip() {
+    // if this constructor is used without `new`, it adds `new` before itself:
+    if(!(this instanceof JSZip)) {
+        return new JSZip();
+    }
+
+    if(arguments.length) {
+        throw new Error("The constructor with parameters has been removed in JSZip 3.0, please check the upgrade guide.");
+    }
+
+    // object containing the files :
+    // {
+    //   "folder/" : {...},
+    //   "folder/data.txt" : {...}
+    // }
+    // NOTE: we use a null prototype because we do not
+    // want filenames like "toString" coming from a zip file
+    // to overwrite methods and attributes in a normal Object.
+    this.files = Object.create(null);
+
+    this.comment = null;
+
+    // Where we are in the hierarchy
+    this.root = "";
+    this.clone = function() {
+        var newObj = new JSZip();
+        for (var i in this) {
+            if (typeof this[i] !== "function") {
+                newObj[i] = this[i];
+            }
+        }
+        return newObj;
+    };
+}
+JSZip.prototype = require("./object");
+JSZip.prototype.loadAsync = require("./load");
+JSZip.support = require("./support");
+JSZip.defaults = require("./defaults");
+
+// TODO find a better way to handle this version,
+// a require('package.json').version doesn't work with webpack, see #327
+JSZip.version = "3.10.1";
+
+JSZip.loadAsync = function (content, options) {
+    return new JSZip().loadAsync(content, options);
+};
+
+JSZip.external = require("./external");
+module.exports = JSZip;

+ 11 - 0
libs/jszip/lib/license_header.js

@@ -0,0 +1,11 @@
+/*!
+
+JSZip v__VERSION__ - A JavaScript class for generating and reading zip files
+<http://stuartk.com/jszip>
+
+(c) 2009-2016 Stuart Knightley <stuart [at] stuartk.com>
+Dual licenced under the MIT license or GPLv3. See https://raw.github.com/Stuk/jszip/main/LICENSE.markdown.
+
+JSZip uses the library pako released under the MIT license :
+https://github.com/nodeca/pako/blob/main/LICENSE
+*/

+ 88 - 0
libs/jszip/lib/load.js

@@ -0,0 +1,88 @@
+"use strict";
+var utils = require("./utils");
+var external = require("./external");
+var utf8 = require("./utf8");
+var ZipEntries = require("./zipEntries");
+var Crc32Probe = require("./stream/Crc32Probe");
+var nodejsUtils = require("./nodejsUtils");
+
+/**
+ * Check the CRC32 of an entry.
+ * @param {ZipEntry} zipEntry the zip entry to check.
+ * @return {Promise} the result.
+ */
+function checkEntryCRC32(zipEntry) {
+    return new external.Promise(function (resolve, reject) {
+        var worker = zipEntry.decompressed.getContentWorker().pipe(new Crc32Probe());
+        worker.on("error", function (e) {
+            reject(e);
+        })
+            .on("end", function () {
+                if (worker.streamInfo.crc32 !== zipEntry.decompressed.crc32) {
+                    reject(new Error("Corrupted zip : CRC32 mismatch"));
+                } else {
+                    resolve();
+                }
+            })
+            .resume();
+    });
+}
+
+module.exports = function (data, options) {
+    var zip = this;
+    options = utils.extend(options || {}, {
+        base64: false,
+        checkCRC32: false,
+        optimizedBinaryString: false,
+        createFolders: false,
+        decodeFileName: utf8.utf8decode
+    });
+
+    if (nodejsUtils.isNode && nodejsUtils.isStream(data)) {
+        return external.Promise.reject(new Error("JSZip can't accept a stream when loading a zip file."));
+    }
+
+    return utils.prepareContent("the loaded zip file", data, true, options.optimizedBinaryString, options.base64)
+        .then(function (data) {
+            var zipEntries = new ZipEntries(options);
+            zipEntries.load(data);
+            return zipEntries;
+        }).then(function checkCRC32(zipEntries) {
+            var promises = [external.Promise.resolve(zipEntries)];
+            var files = zipEntries.files;
+            if (options.checkCRC32) {
+                for (var i = 0; i < files.length; i++) {
+                    promises.push(checkEntryCRC32(files[i]));
+                }
+            }
+            return external.Promise.all(promises);
+        }).then(function addFiles(results) {
+            var zipEntries = results.shift();
+            var files = zipEntries.files;
+            for (var i = 0; i < files.length; i++) {
+                var input = files[i];
+
+                var unsafeName = input.fileNameStr;
+                var safeName = utils.resolve(input.fileNameStr);
+
+                zip.file(safeName, input.decompressed, {
+                    binary: true,
+                    optimizedBinaryString: true,
+                    date: input.date,
+                    dir: input.dir,
+                    comment: input.fileCommentStr.length ? input.fileCommentStr : null,
+                    unixPermissions: input.unixPermissions,
+                    dosPermissions: input.dosPermissions,
+                    createFolders: options.createFolders
+                });
+                if (!input.dir) {
+                    zip.file(safeName).unsafeOriginalName = unsafeName;
+                }
+            }
+            if (zipEntries.zipComment.length) {
+                zip.comment = zipEntries.zipComment;
+            }
+
+            return zip;
+        });
+};
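
A typical Node.js read path through this loader, with CRC checking enabled (file and entry names are illustrative):

var fs = require("fs");
var JSZip = require("jszip");

fs.promises.readFile("archive.zip")
    .then(function (data) {
        return JSZip.loadAsync(data, { checkCRC32: true, createFolders: true });
    })
    .then(function (zip) {
        zip.forEach(function (relativePath, entry) {
            // `relativePath` is the sanitized name; the raw name from the zip
            // is kept on `entry.unsafeOriginalName` (set above)
            console.log(relativePath, entry.dir ? "(dir)" : "");
        });
        return zip.file("readme.txt").async("string");
    })
    .then(function (text) {
        console.log(text);
    });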

+ 74 - 0
libs/jszip/lib/nodejs/NodejsStreamInputAdapter.js

@@ -0,0 +1,74 @@
+"use strict";
+
+var utils = require("../utils");
+var GenericWorker = require("../stream/GenericWorker");
+
+/**
+ * A worker that uses a nodejs stream as source.
+ * @constructor
+ * @param {String} filename the name of the file entry for this stream.
+ * @param {Readable} stream the nodejs stream.
+ */
+function NodejsStreamInputAdapter(filename, stream) {
+    GenericWorker.call(this, "Nodejs stream input adapter for " + filename);
+    this._upstreamEnded = false;
+    this._bindStream(stream);
+}
+
+utils.inherits(NodejsStreamInputAdapter, GenericWorker);
+
+/**
+ * Prepare the stream and bind the callbacks on it.
+ * Do this ASAP on node 0.10 ! A lazy binding doesn't always work.
+ * @param {Stream} stream the nodejs stream to use.
+ */
+NodejsStreamInputAdapter.prototype._bindStream = function (stream) {
+    var self = this;
+    this._stream = stream;
+    stream.pause();
+    stream
+        .on("data", function (chunk) {
+            self.push({
+                data: chunk,
+                meta : {
+                    percent : 0
+                }
+            });
+        })
+        .on("error", function (e) {
+            if(self.isPaused) {
+                this.generatedError = e;
+            } else {
+                self.error(e);
+            }
+        })
+        .on("end", function () {
+            if(self.isPaused) {
+                self._upstreamEnded = true;
+            } else {
+                self.end();
+            }
+        });
+};
+NodejsStreamInputAdapter.prototype.pause = function () {
+    if(!GenericWorker.prototype.pause.call(this)) {
+        return false;
+    }
+    this._stream.pause();
+    return true;
+};
+NodejsStreamInputAdapter.prototype.resume = function () {
+    if(!GenericWorker.prototype.resume.call(this)) {
+        return false;
+    }
+
+    if(this._upstreamEnded) {
+        this.end();
+    } else {
+        this._stream.resume();
+    }
+
+    return true;
+};
+
+module.exports = NodejsStreamInputAdapter;

+ 42 - 0
libs/jszip/lib/nodejs/NodejsStreamOutputAdapter.js

@@ -0,0 +1,42 @@
+"use strict";
+
+var Readable = require("readable-stream").Readable;
+
+var utils = require("../utils");
+utils.inherits(NodejsStreamOutputAdapter, Readable);
+
+/**
+* A nodejs stream using a worker as source.
+* @see the SourceWrapper in http://nodejs.org/api/stream.html
+* @constructor
+* @param {StreamHelper} helper the helper wrapping the worker
+* @param {Object} options the nodejs stream options
+* @param {Function} updateCb the update callback.
+*/
+function NodejsStreamOutputAdapter(helper, options, updateCb) {
+    Readable.call(this, options);
+    this._helper = helper;
+
+    var self = this;
+    helper.on("data", function (data, meta) {
+        if (!self.push(data)) {
+            self._helper.pause();
+        }
+        if(updateCb) {
+            updateCb(meta);
+        }
+    })
+        .on("error", function(e) {
+            self.emit("error", e);
+        })
+        .on("end", function () {
+            self.push(null);
+        });
+}
+
+
+NodejsStreamOutputAdapter.prototype._read = function() {
+    this._helper.resume();
+};
+
+module.exports = NodejsStreamOutputAdapter;

+ 57 - 0
libs/jszip/lib/nodejsUtils.js

@@ -0,0 +1,57 @@
+"use strict";
+
+module.exports = {
+    /**
+     * True if this is running in Nodejs, will be undefined in a browser.
+     * In a browser, browserify won't include this file and the whole module
+     * will be resolved to an empty object.
+     */
+    isNode : typeof Buffer !== "undefined",
+    /**
+     * Create a new nodejs Buffer from an existing content.
+     * @param {Object} data the data to pass to the constructor.
+     * @param {String} encoding the encoding to use.
+     * @return {Buffer} a new Buffer.
+     */
+    newBufferFrom: function(data, encoding) {
+        if (Buffer.from && Buffer.from !== Uint8Array.from) {
+            return Buffer.from(data, encoding);
+        } else {
+            if (typeof data === "number") {
+                // Safeguard for old Node.js versions. On newer versions,
+                // Buffer.from(number) / Buffer(number, encoding) already throw.
+                throw new Error("The \"data\" argument must not be a number");
+            }
+            return new Buffer(data, encoding);
+        }
+    },
+    /**
+     * Create a new nodejs Buffer with the specified size.
+     * @param {Integer} size the size of the buffer.
+     * @return {Buffer} a new Buffer.
+     */
+    allocBuffer: function (size) {
+        if (Buffer.alloc) {
+            return Buffer.alloc(size);
+        } else {
+            var buf = new Buffer(size);
+            buf.fill(0);
+            return buf;
+        }
+    },
+    /**
+     * Find out if an object is a Buffer.
+     * @param {Object} b the object to test.
+     * @return {Boolean} true if the object is a Buffer, false otherwise.
+     */
+    isBuffer : function(b){
+        return Buffer.isBuffer(b);
+    },
+
+    isStream : function (obj) {
+        return obj &&
+            typeof obj.on === "function" &&
+            typeof obj.pause === "function" &&
+            typeof obj.resume === "function";
+    }
+};
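
These helpers smooth over the Buffer API differences between old and recent Node.js versions (Buffer.from/Buffer.alloc versus the deprecated constructor). A small sketch of what each call yields:

    var nodejsUtils = require("./nodejsUtils");

    var buf = nodejsUtils.newBufferFrom("abc", "utf-8"); // <Buffer 61 62 63>
    var zeroed = nodejsUtils.allocBuffer(4);             // <Buffer 00 00 00 00>

    nodejsUtils.isBuffer(buf);           // true
    nodejsUtils.isStream(process.stdin); // true: it has on/pause/resume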

+ 384 - 0
libs/jszip/lib/object.js

@@ -0,0 +1,384 @@
+"use strict";
+var utf8 = require("./utf8");
+var utils = require("./utils");
+var GenericWorker = require("./stream/GenericWorker");
+var StreamHelper = require("./stream/StreamHelper");
+var defaults = require("./defaults");
+var CompressedObject = require("./compressedObject");
+var ZipObject = require("./zipObject");
+var generate = require("./generate");
+var nodejsUtils = require("./nodejsUtils");
+var NodejsStreamInputAdapter = require("./nodejs/NodejsStreamInputAdapter");
+
+
+/**
+ * Add a file in the current folder.
+ * @private
+ * @param {string} name the name of the file
+ * @param {String|ArrayBuffer|Uint8Array|Buffer} data the data of the file
+ * @param {Object} originalOptions the options of the file
+ * @return {Object} the new file.
+ */
+var fileAdd = function(name, data, originalOptions) {
+    // be sure sub folders exist
+    var dataType = utils.getTypeOf(data),
+        parent;
+
+
+    /*
+     * Correct options.
+     */
+
+    var o = utils.extend(originalOptions || {}, defaults);
+    o.date = o.date || new Date();
+    if (o.compression !== null) {
+        o.compression = o.compression.toUpperCase();
+    }
+
+    if (typeof o.unixPermissions === "string") {
+        o.unixPermissions = parseInt(o.unixPermissions, 8);
+    }
+
+    // UNX_IFDIR  0040000 see zipinfo.c
+    if (o.unixPermissions && (o.unixPermissions & 0x4000)) {
+        o.dir = true;
+    }
+    // Bit 4    Directory
+    if (o.dosPermissions && (o.dosPermissions & 0x0010)) {
+        o.dir = true;
+    }
+
+    if (o.dir) {
+        name = forceTrailingSlash(name);
+    }
+    if (o.createFolders && (parent = parentFolder(name))) {
+        folderAdd.call(this, parent, true);
+    }
+
+    var isUnicodeString = dataType === "string" && o.binary === false && o.base64 === false;
+    if (!originalOptions || typeof originalOptions.binary === "undefined") {
+        o.binary = !isUnicodeString;
+    }
+
+
+    var isCompressedEmpty = (data instanceof CompressedObject) && data.uncompressedSize === 0;
+
+    if (isCompressedEmpty || o.dir || !data || data.length === 0) {
+        o.base64 = false;
+        o.binary = true;
+        data = "";
+        o.compression = "STORE";
+        dataType = "string";
+    }
+
+    /*
+     * Convert content to fit.
+     */
+
+    var zipObjectContent = null;
+    if (data instanceof CompressedObject || data instanceof GenericWorker) {
+        zipObjectContent = data;
+    } else if (nodejsUtils.isNode && nodejsUtils.isStream(data)) {
+        zipObjectContent = new NodejsStreamInputAdapter(name, data);
+    } else {
+        zipObjectContent = utils.prepareContent(name, data, o.binary, o.optimizedBinaryString, o.base64);
+    }
+
+    var object = new ZipObject(name, zipObjectContent, o);
+    this.files[name] = object;
+    /*
+    TODO: we can't throw an exception because we have async promises
+    (we can have a promise of a Date() for example) but returning a
+    promise is useless because file(name, data) returns the JSZip
+    object for chaining. Should we break that to allow the user
+    to catch the error ?
+
+    return external.Promise.resolve(zipObjectContent)
+    .then(function () {
+        return object;
+    });
+    */
+};
+
+/**
+ * Find the parent folder of the path.
+ * @private
+ * @param {string} path the path to use
+ * @return {string} the parent folder, or ""
+ */
+var parentFolder = function (path) {
+    if (path.slice(-1) === "/") {
+        path = path.substring(0, path.length - 1);
+    }
+    var lastSlash = path.lastIndexOf("/");
+    return (lastSlash > 0) ? path.substring(0, lastSlash) : "";
+};
+
+/**
+ * Returns the path with a slash at the end.
+ * @private
+ * @param {String} path the path to check.
+ * @return {String} the path with a trailing slash.
+ */
+var forceTrailingSlash = function(path) {
+    // Check the name ends with a /
+    if (path.slice(-1) !== "/") {
+        path += "/"; // IE doesn't like substr(-1)
+    }
+    return path;
+};
+
+/**
+ * Add a (sub) folder in the current folder.
+ * @private
+ * @param {string} name the folder's name
+ * @param {boolean=} [createFolders] If true, automatically create sub
+ *  folders. Defaults to false.
+ * @return {Object} the new folder.
+ */
+var folderAdd = function(name, createFolders) {
+    createFolders = (typeof createFolders !== "undefined") ? createFolders : defaults.createFolders;
+
+    name = forceTrailingSlash(name);
+
+    // Does this folder already exist?
+    if (!this.files[name]) {
+        fileAdd.call(this, name, null, {
+            dir: true,
+            createFolders: createFolders
+        });
+    }
+    return this.files[name];
+};
+
+/**
+* Cross-window, cross-Node-context regular expression detection
+* @param  {Object}  object Anything
+* @return {Boolean}        true if the object is a regular expression,
+* false otherwise
+*/
+function isRegExp(object) {
+    return Object.prototype.toString.call(object) === "[object RegExp]";
+}
+
+// return the actual prototype of JSZip
+var out = {
+    /**
+     * @see loadAsync
+     */
+    load: function() {
+        throw new Error("This method has been removed in JSZip 3.0, please check the upgrade guide.");
+    },
+
+
+    /**
+     * Call a callback function for each entry at this folder level.
+     * @param {Function} cb the callback function:
+     * function (relativePath, file) {...}
+     * It takes 2 arguments : the relative path and the file.
+     */
+    forEach: function(cb) {
+        var filename, relativePath, file;
+        // ignore warning about unwanted properties because this.files is a null prototype object
+        /* eslint-disable-next-line guard-for-in */
+        for (filename in this.files) {
+            file = this.files[filename];
+            relativePath = filename.slice(this.root.length, filename.length);
+            if (relativePath && filename.slice(0, this.root.length) === this.root) { // the file is in the current root
+                cb(relativePath, file); // TODO reverse the parameters ? need to be clean AND consistent with the filter search fn...
+            }
+        }
+    },
+
+    /**
+     * Filter nested files/folders with the specified function.
+     * @param {Function} search the predicate to use :
+     * function (relativePath, file) {...}
+     * It takes 2 arguments : the relative path and the file.
+     * @return {Array} An array of matching elements.
+     */
+    filter: function(search) {
+        var result = [];
+        this.forEach(function (relativePath, entry) {
+            if (search(relativePath, entry)) { // the file matches the function
+                result.push(entry);
+            }
+
+        });
+        return result;
+    },
+
+    /**
+     * Add a file to the zip file, or search a file.
+     * @param   {string|RegExp} name The name of the file to add (if data is defined),
+     * the name of the file to find (if no data) or a regex to match files.
+     * @param   {String|ArrayBuffer|Uint8Array|Buffer} data  The file data, either raw or base64 encoded
+     * @param   {Object} o     File options
+     * @return  {JSZip|Object|Array} this JSZip object (when adding a file),
+     * a file (when searching by string) or an array of files (when searching by regex).
+     */
+    file: function(name, data, o) {
+        if (arguments.length === 1) {
+            if (isRegExp(name)) {
+                var regexp = name;
+                return this.filter(function(relativePath, file) {
+                    return !file.dir && regexp.test(relativePath);
+                });
+            }
+            else { // text
+                var obj = this.files[this.root + name];
+                if (obj && !obj.dir) {
+                    return obj;
+                } else {
+                    return null;
+                }
+            }
+        }
+        else { // more than one argument : we have data !
+            name = this.root + name;
+            fileAdd.call(this, name, data, o);
+        }
+        return this;
+    },
+
+    /**
+     * Add a directory to the zip file, or search.
+     * @param   {String|RegExp} arg The name of the directory to add, or a regex to search folders.
+     * @return  {JSZip} an object with the new directory as the root, or an array containing matching folders.
+     */
+    folder: function(arg) {
+        if (!arg) {
+            return this;
+        }
+
+        if (isRegExp(arg)) {
+            return this.filter(function(relativePath, file) {
+                return file.dir && arg.test(relativePath);
+            });
+        }
+
+        // else, name is a new folder
+        var name = this.root + arg;
+        var newFolder = folderAdd.call(this, name);
+
+        // Allow chaining by returning a new object with this folder as the root
+        var ret = this.clone();
+        ret.root = newFolder.name;
+        return ret;
+    },
+
+    /**
+     * Delete a file, or a directory and all sub-files, from the zip
+     * @param {string} name the name of the file to delete
+     * @return {JSZip} this JSZip object
+     */
+    remove: function(name) {
+        name = this.root + name;
+        var file = this.files[name];
+        if (!file) {
+            // Look for any folders
+            if (name.slice(-1) !== "/") {
+                name += "/";
+            }
+            file = this.files[name];
+        }
+
+        if (file && !file.dir) {
+            // file
+            delete this.files[name];
+        } else {
+            // maybe a folder, delete recursively
+            var kids = this.filter(function(relativePath, file) {
+                return file.name.slice(0, name.length) === name;
+            });
+            for (var i = 0; i < kids.length; i++) {
+                delete this.files[kids[i].name];
+            }
+        }
+
+        return this;
+    },
+
+    /**
+     * @deprecated This method has been removed in JSZip 3.0, please check the upgrade guide.
+     */
+    generate: function() {
+        throw new Error("This method has been removed in JSZip 3.0, please check the upgrade guide.");
+    },
+
+    /**
+     * Generate the complete zip file as an internal stream.
+     * @param {Object} options the options to generate the zip file :
+     * - compression, "STORE" by default.
+     * - type, "base64" by default. Values are : string, base64, uint8array, arraybuffer, blob.
+     * @return {StreamHelper} the streamed zip file.
+     */
+    generateInternalStream: function(options) {
+        var worker, opts = {};
+        try {
+            opts = utils.extend(options || {}, {
+                streamFiles: false,
+                compression: "STORE",
+                compressionOptions : null,
+                type: "",
+                platform: "DOS",
+                comment: null,
+                mimeType: "application/zip",
+                encodeFileName: utf8.utf8encode
+            });
+
+            opts.type = opts.type.toLowerCase();
+            opts.compression = opts.compression.toUpperCase();
+
+            // "binarystring" is preferred but the internals use "string".
+            if(opts.type === "binarystring") {
+                opts.type = "string";
+            }
+
+            if (!opts.type) {
+                throw new Error("No output type specified.");
+            }
+
+            utils.checkSupport(opts.type);
+
+            // accept nodejs `process.platform`
+            if(
+                opts.platform === "darwin" ||
+                opts.platform === "freebsd" ||
+                opts.platform === "linux" ||
+                opts.platform === "sunos"
+            ) {
+                opts.platform = "UNIX";
+            }
+            if (opts.platform === "win32") {
+                opts.platform = "DOS";
+            }
+
+            var comment = opts.comment || this.comment || "";
+            worker = generate.generateWorker(this, opts, comment);
+        } catch (e) {
+            worker = new GenericWorker("error");
+            worker.error(e);
+        }
+        return new StreamHelper(worker, opts.type || "string", opts.mimeType);
+    },
+    /**
+     * Generate the complete zip file asynchronously.
+     * @see generateInternalStream
+     */
+    generateAsync: function(options, onUpdate) {
+        return this.generateInternalStream(options).accumulate(onUpdate);
+    },
+    /**
+     * Generate the complete zip file asynchronously.
+     * @see generateInternalStream
+     */
+    generateNodeStream: function(options, onUpdate) {
+        options = options || {};
+        if (!options.type) {
+            options.type = "nodebuffer";
+        }
+        return this.generateInternalStream(options).toNodejsStream(onUpdate);
+    }
+};
+module.exports = out;
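
The object above is the public JSZip prototype, so file(), folder(), remove() and the generate* methods all chain on the same files map. A hedged usage sketch (gifBase64 is a hypothetical base64 string; the names are illustrative):

    var zip = new JSZip();
    zip.file("readme.txt", "hello")                       // add at the root
        .folder("img")                                    // clone rooted at img/
        .file("smile.gif", gifBase64, { base64: true });  // adds img/smile.gif

    zip.file(/\.txt$/);          // regex search: array of matching files
    zip.remove("img/smile.gif"); // delete a single entry

    zip.generateAsync({ type: "uint8array" }).then(function (bytes) {
        // bytes holds the complete generated zip
    });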

+ 10 - 0
libs/jszip/lib/readable-stream-browser.js

@@ -0,0 +1,10 @@
+"use strict";
+/*
+ * This file is used by module bundlers (browserify/webpack/etc) when
+ * including a stream implementation. We use "readable-stream" to get a
+ * consistent behavior between nodejs versions but bundlers often have a shim
+ * for "stream". Using this shim greatly improve the compatibility and greatly
+ * reduce the final size of the bundle (only one stream implementation, not
+ * two).
+ */
+module.exports = require("stream");
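
Bundlers pick this file up through a field in package.json that remaps the "readable-stream" dependency for browser builds, so only the bundler's own stream shim ships. A sketch of what such a mapping looks like (the exact contents of JSZip's package.json may differ):

    {
      "browser": {
        "readable-stream": "./lib/readable-stream-browser.js"
      }
    }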

+ 57 - 0
libs/jszip/lib/reader/ArrayReader.js

@@ -0,0 +1,57 @@
+"use strict";
+var DataReader = require("./DataReader");
+var utils = require("../utils");
+
+function ArrayReader(data) {
+    DataReader.call(this, data);
+    for(var i = 0; i < this.data.length; i++) {
+        data[i] = data[i] & 0xFF;
+    }
+}
+utils.inherits(ArrayReader, DataReader);
+/**
+ * @see DataReader.byteAt
+ */
+ArrayReader.prototype.byteAt = function(i) {
+    return this.data[this.zero + i];
+};
+/**
+ * @see DataReader.lastIndexOfSignature
+ */
+ArrayReader.prototype.lastIndexOfSignature = function(sig) {
+    var sig0 = sig.charCodeAt(0),
+        sig1 = sig.charCodeAt(1),
+        sig2 = sig.charCodeAt(2),
+        sig3 = sig.charCodeAt(3);
+    for (var i = this.length - 4; i >= 0; --i) {
+        if (this.data[i] === sig0 && this.data[i + 1] === sig1 && this.data[i + 2] === sig2 && this.data[i + 3] === sig3) {
+            return i - this.zero;
+        }
+    }
+
+    return -1;
+};
+/**
+ * @see DataReader.readAndCheckSignature
+ */
+ArrayReader.prototype.readAndCheckSignature = function (sig) {
+    var sig0 = sig.charCodeAt(0),
+        sig1 = sig.charCodeAt(1),
+        sig2 = sig.charCodeAt(2),
+        sig3 = sig.charCodeAt(3),
+        data = this.readData(4);
+    return sig0 === data[0] && sig1 === data[1] && sig2 === data[2] && sig3 === data[3];
+};
+/**
+ * @see DataReader.readData
+ */
+ArrayReader.prototype.readData = function(size) {
+    this.checkOffset(size);
+    if(size === 0) {
+        return [];
+    }
+    var result = this.data.slice(this.zero + this.index, this.zero + this.index + size);
+    this.index += size;
+    return result;
+};
+module.exports = ArrayReader;
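
A tiny sketch of this reader on a hand-built byte array; the first four bytes are the local file header signature ("P" is 0x50, "K" is 0x4B):

    var ArrayReader = require("./reader/ArrayReader");

    var reader = new ArrayReader([0x50, 0x4B, 0x03, 0x04, 0xFF]);
    reader.readAndCheckSignature("PK\x03\x04"); // true, index advances to 4
    reader.byteAt(4);                           // 0xFF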

+ 116 - 0
libs/jszip/lib/reader/DataReader.js

@@ -0,0 +1,116 @@
+"use strict";
+var utils = require("../utils");
+
+function DataReader(data) {
+    this.data = data; // type : see implementation
+    this.length = data.length;
+    this.index = 0;
+    this.zero = 0;
+}
+DataReader.prototype = {
+    /**
+     * Check that the offset will not go too far.
+     * @param {number} offset the additional offset to check.
+     * @throws {Error} an Error if the offset is out of bounds.
+     */
+    checkOffset: function(offset) {
+        this.checkIndex(this.index + offset);
+    },
+    /**
+     * Check that the specified index will not be too far.
+     * @param {number} newIndex the index to check.
+     * @throws {Error} an Error if the index is out of bounds.
+     */
+    checkIndex: function(newIndex) {
+        if (this.length < this.zero + newIndex || newIndex < 0) {
+            throw new Error("End of data reached (data length = " + this.length + ", asked index = " + (newIndex) + "). Corrupted zip ?");
+        }
+    },
+    /**
+     * Change the index.
+     * @param {number} newIndex The new index.
+     * @throws {Error} if the new index is out of the data.
+     */
+    setIndex: function(newIndex) {
+        this.checkIndex(newIndex);
+        this.index = newIndex;
+    },
+    /**
+     * Skip the next n bytes.
+     * @param {number} n the number of bytes to skip.
+     * @throws {Error} if the new index is out of the data.
+     */
+    skip: function(n) {
+        this.setIndex(this.index + n);
+    },
+    /**
+     * Get the byte at the specified index.
+     * @param {number} i the index to use.
+     * @return {number} a byte.
+     */
+    byteAt: function() {
+        // see implementations
+    },
+    /**
+     * Get the next number with a given byte size.
+     * @param {number} size the number of bytes to read.
+     * @return {number} the corresponding number.
+     */
+    readInt: function(size) {
+        var result = 0,
+            i;
+        this.checkOffset(size);
+        for (i = this.index + size - 1; i >= this.index; i--) {
+            result = (result << 8) + this.byteAt(i);
+        }
+        this.index += size;
+        return result;
+    },
+    /**
+     * Get the next string with a given byte size.
+     * @param {number} size the number of bytes to read.
+     * @return {string} the corresponding string.
+     */
+    readString: function(size) {
+        return utils.transformTo("string", this.readData(size));
+    },
+    /**
+     * Get raw data without conversion, <size> bytes.
+     * @param {number} size the number of bytes to read.
+     * @return {Object} the raw data, implementation specific.
+     */
+    readData: function() {
+        // see implementations
+    },
+    /**
+     * Find the last occurrence of a zip signature (4 bytes).
+     * @param {string} sig the signature to find.
+     * @return {number} the index of the last occurrence, -1 if not found.
+     */
+    lastIndexOfSignature: function() {
+        // see implementations
+    },
+    /**
+     * Read the signature (4 bytes) at the current position and compare it with sig.
+     * @param {string} sig the expected signature
+     * @return {boolean} true if the signature matches, false otherwise.
+     */
+    readAndCheckSignature: function() {
+        // see implementations
+    },
+    /**
+     * Get the next date.
+     * @return {Date} the date.
+     */
+    readDate: function() {
+        var dostime = this.readInt(4);
+        return new Date(Date.UTC(
+            ((dostime >> 25) & 0x7f) + 1980, // year
+            ((dostime >> 21) & 0x0f) - 1, // month
+            (dostime >> 16) & 0x1f, // day
+            (dostime >> 11) & 0x1f, // hour
+            (dostime >> 5) & 0x3f, // minute
+            (dostime & 0x1f) << 1)); // second
+    }
+};
+module.exports = DataReader;
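
Two behaviours are worth a worked example: readInt() is little-endian, as in the zip format, and readDate() unpacks the packed MS-DOS date/time. A sketch using the ArrayReader subclass, with hand-computed values:

    var ArrayReader = require("./reader/ArrayReader");

    // 0x04 0x03 0x02 0x01 read as a 4-byte little-endian integer
    new ArrayReader([0x04, 0x03, 0x02, 0x01]).readInt(4); // 0x01020304 === 16909060

    // DOS time: year-1980 (bits 25-31), month (21-24), day (16-20),
    // hour (11-15), minute (5-10), second/2 (0-4).
    // 2023-06-15 12:30:40 UTC packs to:
    var dostime = (43 << 25) | (6 << 21) | (15 << 16) | (12 << 11) | (30 << 5) | 20;
    var bytes = [dostime & 0xFF, (dostime >>> 8) & 0xFF,
                 (dostime >>> 16) & 0xFF, (dostime >>> 24) & 0xFF];
    new ArrayReader(bytes).readDate(); // Date.UTC(2023, 5, 15, 12, 30, 40)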

+ 19 - 0
libs/jszip/lib/reader/NodeBufferReader.js

@@ -0,0 +1,19 @@
+"use strict";
+var Uint8ArrayReader = require("./Uint8ArrayReader");
+var utils = require("../utils");
+
+function NodeBufferReader(data) {
+    Uint8ArrayReader.call(this, data);
+}
+utils.inherits(NodeBufferReader, Uint8ArrayReader);
+
+/**
+ * @see DataReader.readData
+ */
+NodeBufferReader.prototype.readData = function(size) {
+    this.checkOffset(size);
+    var result = this.data.slice(this.zero + this.index, this.zero + this.index + size);
+    this.index += size;
+    return result;
+};
+module.exports = NodeBufferReader;

+ 38 - 0
libs/jszip/lib/reader/StringReader.js

@@ -0,0 +1,38 @@
+"use strict";
+var DataReader = require("./DataReader");
+var utils = require("../utils");
+
+function StringReader(data) {
+    DataReader.call(this, data);
+}
+utils.inherits(StringReader, DataReader);
+/**
+ * @see DataReader.byteAt
+ */
+StringReader.prototype.byteAt = function(i) {
+    return this.data.charCodeAt(this.zero + i);
+};
+/**
+ * @see DataReader.lastIndexOfSignature
+ */
+StringReader.prototype.lastIndexOfSignature = function(sig) {
+    return this.data.lastIndexOf(sig) - this.zero;
+};
+/**
+ * @see DataReader.readAndCheckSignature
+ */
+StringReader.prototype.readAndCheckSignature = function (sig) {
+    var data = this.readData(4);
+    return sig === data;
+};
+/**
+ * @see DataReader.readData
+ */
+StringReader.prototype.readData = function(size) {
+    this.checkOffset(size);
+    // this will work because the constructor applied the "& 0xff" mask.
+    var result = this.data.slice(this.zero + this.index, this.zero + this.index + size);
+    this.index += size;
+    return result;
+};
+module.exports = StringReader;

+ 22 - 0
libs/jszip/lib/reader/Uint8ArrayReader.js

@@ -0,0 +1,22 @@
+"use strict";
+var ArrayReader = require("./ArrayReader");
+var utils = require("../utils");
+
+function Uint8ArrayReader(data) {
+    ArrayReader.call(this, data);
+}
+utils.inherits(Uint8ArrayReader, ArrayReader);
+/**
+ * @see DataReader.readData
+ */
+Uint8ArrayReader.prototype.readData = function(size) {
+    this.checkOffset(size);
+    if(size === 0) {
+        // in IE10, when using subarray(idx, idx), we get the array [0x00] instead of [].
+        return new Uint8Array(0);
+    }
+    var result = this.data.subarray(this.zero + this.index, this.zero + this.index + size);
+    this.index += size;
+    return result;
+};
+module.exports = Uint8ArrayReader;

+ 28 - 0
libs/jszip/lib/reader/readerFor.js

@@ -0,0 +1,28 @@
+"use strict";
+
+var utils = require("../utils");
+var support = require("../support");
+var ArrayReader = require("./ArrayReader");
+var StringReader = require("./StringReader");
+var NodeBufferReader = require("./NodeBufferReader");
+var Uint8ArrayReader = require("./Uint8ArrayReader");
+
+/**
+ * Create a reader adapted to the data.
+ * @param {String|ArrayBuffer|Uint8Array|Buffer} data the data to read.
+ * @return {DataReader} the data reader.
+ */
+module.exports = function (data) {
+    var type = utils.getTypeOf(data);
+    utils.checkSupport(type);
+    if (type === "string" && !support.uint8array) {
+        return new StringReader(data);
+    }
+    if (type === "nodebuffer") {
+        return new NodeBufferReader(data);
+    }
+    if (support.uint8array) {
+        return new Uint8ArrayReader(utils.transformTo("uint8array", data));
+    }
+    return new ArrayReader(utils.transformTo("array", data));
+};
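
Restating the dispatch above: strings only keep the slow StringReader when typed arrays are unavailable, Node Buffers get their own reader, and everything else is converted to a Uint8Array (or a plain array as a last resort). A short sketch:

    var readerFor = require("./reader/readerFor");

    // With Uint8Array support, strings/arrays/ArrayBuffers all end up in a
    // Uint8ArrayReader; a nodejs Buffer goes to NodeBufferReader.
    var reader = readerFor(new Uint8Array([0x50, 0x4B, 0x05, 0x06]));
    reader.readAndCheckSignature("PK\x05\x06"); // true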

+ 7 - 0
libs/jszip/lib/signature.js

@@ -0,0 +1,7 @@
+"use strict";
+exports.LOCAL_FILE_HEADER = "PK\x03\x04";
+exports.CENTRAL_FILE_HEADER = "PK\x01\x02";
+exports.CENTRAL_DIRECTORY_END = "PK\x05\x06";
+exports.ZIP64_CENTRAL_DIRECTORY_LOCATOR = "PK\x06\x07";
+exports.ZIP64_CENTRAL_DIRECTORY_END = "PK\x06\x06";
+exports.DATA_DESCRIPTOR = "PK\x07\x08";
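
These are the 4-byte magic numbers from the zip specification written as binary strings; "PK" is the bytes 0x50 0x4B, so a local file header starts with 50 4B 03 04 on disk. A one-liner to see the raw bytes:

    var signature = require("./signature");

    // [0x50, 0x4B, 0x03, 0x04]
    var bytes = signature.LOCAL_FILE_HEADER.split("").map(function (c) {
        return c.charCodeAt(0);
    });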

+ 26 - 0
libs/jszip/lib/stream/ConvertWorker.js

@@ -0,0 +1,26 @@
+"use strict";
+
+var GenericWorker = require("./GenericWorker");
+var utils = require("../utils");
+
+/**
+ * A worker which converts chunks to a specified type.
+ * @constructor
+ * @param {String} destType the destination type.
+ */
+function ConvertWorker(destType) {
+    GenericWorker.call(this, "ConvertWorker to " + destType);
+    this.destType = destType;
+}
+utils.inherits(ConvertWorker, GenericWorker);
+
+/**
+ * @see GenericWorker.processChunk
+ */
+ConvertWorker.prototype.processChunk = function (chunk) {
+    this.push({
+        data : utils.transformTo(this.destType, chunk.data),
+        meta : chunk.meta
+    });
+};
+module.exports = ConvertWorker;

+ 24 - 0
libs/jszip/lib/stream/Crc32Probe.js

@@ -0,0 +1,24 @@
+"use strict";
+
+var GenericWorker = require("./GenericWorker");
+var crc32 = require("../crc32");
+var utils = require("../utils");
+
+/**
+ * A worker which calculates the crc32 of the data flowing through.
+ * @constructor
+ */
+function Crc32Probe() {
+    GenericWorker.call(this, "Crc32Probe");
+    this.withStreamInfo("crc32", 0);
+}
+utils.inherits(Crc32Probe, GenericWorker);
+
+/**
+ * @see GenericWorker.processChunk
+ */
+Crc32Probe.prototype.processChunk = function (chunk) {
+    this.streamInfo.crc32 = crc32(chunk.data, this.streamInfo.crc32 || 0);
+    this.push(chunk);
+};
+module.exports = Crc32Probe;

+ 29 - 0
libs/jszip/lib/stream/DataLengthProbe.js

@@ -0,0 +1,29 @@
+"use strict";
+
+var utils = require("../utils");
+var GenericWorker = require("./GenericWorker");
+
+/**
+ * A worker which calculates the total length of the data flowing through.
+ * @constructor
+ * @param {String} propName the name used to expose the length
+ */
+function DataLengthProbe(propName) {
+    GenericWorker.call(this, "DataLengthProbe for " + propName);
+    this.propName = propName;
+    this.withStreamInfo(propName, 0);
+}
+utils.inherits(DataLengthProbe, GenericWorker);
+
+/**
+ * @see GenericWorker.processChunk
+ */
+DataLengthProbe.prototype.processChunk = function (chunk) {
+    if(chunk) {
+        var length = this.streamInfo[this.propName] || 0;
+        this.streamInfo[this.propName] = length + chunk.data.length;
+    }
+    GenericWorker.prototype.processChunk.call(this, chunk);
+};
+module.exports = DataLengthProbe;
+

+ 116 - 0
libs/jszip/lib/stream/DataWorker.js

@@ -0,0 +1,116 @@
+"use strict";
+
+var utils = require("../utils");
+var GenericWorker = require("./GenericWorker");
+
+// the size of the generated chunks
+// TODO expose this as a public variable
+var DEFAULT_BLOCK_SIZE = 16 * 1024;
+
+/**
+ * A worker that reads content and emits chunks.
+ * @constructor
+ * @param {Promise} dataP the promise of the data to split
+ */
+function DataWorker(dataP) {
+    GenericWorker.call(this, "DataWorker");
+    var self = this;
+    this.dataIsReady = false;
+    this.index = 0;
+    this.max = 0;
+    this.data = null;
+    this.type = "";
+
+    this._tickScheduled = false;
+
+    dataP.then(function (data) {
+        self.dataIsReady = true;
+        self.data = data;
+        self.max = data && data.length || 0;
+        self.type = utils.getTypeOf(data);
+        if(!self.isPaused) {
+            self._tickAndRepeat();
+        }
+    }, function (e) {
+        self.error(e);
+    });
+}
+
+utils.inherits(DataWorker, GenericWorker);
+
+/**
+ * @see GenericWorker.cleanUp
+ */
+DataWorker.prototype.cleanUp = function () {
+    GenericWorker.prototype.cleanUp.call(this);
+    this.data = null;
+};
+
+/**
+ * @see GenericWorker.resume
+ */
+DataWorker.prototype.resume = function () {
+    if(!GenericWorker.prototype.resume.call(this)) {
+        return false;
+    }
+
+    if (!this._tickScheduled && this.dataIsReady) {
+        this._tickScheduled = true;
+        utils.delay(this._tickAndRepeat, [], this);
+    }
+    return true;
+};
+
+/**
+ * Trigger a tick and schedule another call to this function.
+ */
+DataWorker.prototype._tickAndRepeat = function() {
+    this._tickScheduled = false;
+    if(this.isPaused || this.isFinished) {
+        return;
+    }
+    this._tick();
+    if(!this.isFinished) {
+        utils.delay(this._tickAndRepeat, [], this);
+        this._tickScheduled = true;
+    }
+};
+
+/**
+ * Read and push a chunk.
+ */
+DataWorker.prototype._tick = function() {
+
+    if(this.isPaused || this.isFinished) {
+        return false;
+    }
+
+    var size = DEFAULT_BLOCK_SIZE;
+    var data = null, nextIndex = Math.min(this.max, this.index + size);
+    if (this.index >= this.max) {
+        // EOF
+        return this.end();
+    } else {
+        switch(this.type) {
+        case "string":
+            data = this.data.substring(this.index, nextIndex);
+            break;
+        case "uint8array":
+            data = this.data.subarray(this.index, nextIndex);
+            break;
+        case "array":
+        case "nodebuffer":
+            data = this.data.slice(this.index, nextIndex);
+            break;
+        }
+        this.index = nextIndex;
+        return this.push({
+            data : data,
+            meta : {
+                percent : this.max ? this.index / this.max * 100 : 0
+            }
+        });
+    }
+};
+
+module.exports = DataWorker;
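
DataWorker is the source end of every chain: it waits for the data promise, then emits chunks of at most 16 KiB with a progress percentage in the meta. A sketch wiring it straight into a GenericWorker sink (a plain Promise stands in for external.Promise here):

    var DataWorker = require("./stream/DataWorker");
    var GenericWorker = require("./stream/GenericWorker");

    var source = new DataWorker(Promise.resolve(new Array(40000 + 1).join("a")));
    var sink = source.pipe(new GenericWorker("sink"));

    sink.on("data", function (chunk) {
        // chunk.data is at most 16 * 1024 chars, chunk.meta.percent grows to 100
    }).on("end", function () {
        // all chunks delivered
    });

    sink.resume(); // resuming the last worker resumes the whole chain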

+ 263 - 0
libs/jszip/lib/stream/GenericWorker.js

@@ -0,0 +1,263 @@
+"use strict";
+
+/**
+ * A worker that does nothing but pass chunks to the next one. This is like
+ * a nodejs stream but with some differences. On the good side :
+ * - it works on IE 6-9 without any issue / polyfill
+ * - it weighs less than the full dependencies bundled with browserify
+ * - it forwards errors (no need to declare an error handler EVERYWHERE)
+ *
+ * A chunk is an object with 2 attributes : `meta` and `data`. The former is an
+ * object containing anything (`percent` for example), see each worker for more
+ * details. The latter is the real data (String, Uint8Array, etc).
+ *
+ * @constructor
+ * @param {String} name the name of the stream (mainly used for debugging purposes)
+ */
+function GenericWorker(name) {
+    // the name of the worker
+    this.name = name || "default";
+    // an object containing metadata about the workers chain
+    this.streamInfo = {};
+    // an error which happened when the worker was paused
+    this.generatedError = null;
+    // an object containing metadata to be merged by this worker into the general metadata
+    this.extraStreamInfo = {};
+    // true if the stream is paused (and should not do anything), false otherwise
+    this.isPaused = true;
+    // true if the stream is finished (and should not do anything), false otherwise
+    this.isFinished = false;
+    // true if the stream is locked to prevent further structure updates (pipe), false otherwise
+    this.isLocked = false;
+    // the event listeners
+    this._listeners = {
+        "data":[],
+        "end":[],
+        "error":[]
+    };
+    // the previous worker, if any
+    this.previous = null;
+}
+
+GenericWorker.prototype = {
+    /**
+     * Push a chunk to the next workers.
+     * @param {Object} chunk the chunk to push
+     */
+    push : function (chunk) {
+        this.emit("data", chunk);
+    },
+    /**
+     * End the stream.
+     * @return {Boolean} true if this call ended the worker, false otherwise.
+     */
+    end : function () {
+        if (this.isFinished) {
+            return false;
+        }
+
+        this.flush();
+        try {
+            this.emit("end");
+            this.cleanUp();
+            this.isFinished = true;
+        } catch (e) {
+            this.emit("error", e);
+        }
+        return true;
+    },
+    /**
+     * End the stream with an error.
+     * @param {Error} e the error which caused the premature end.
+     * @return {Boolean} true if this call ended the worker with an error, false otherwise.
+     */
+    error : function (e) {
+        if (this.isFinished) {
+            return false;
+        }
+
+        if(this.isPaused) {
+            this.generatedError = e;
+        } else {
+            this.isFinished = true;
+
+            this.emit("error", e);
+
+            // if the workers chain exploded in the middle of the chain,
+            // the error event will go downward but we also need to notify
+            // workers upward that there has been an error.
+            if(this.previous) {
+                this.previous.error(e);
+            }
+
+            this.cleanUp();
+        }
+        return true;
+    },
+    /**
+     * Add a callback on an event.
+     * @param {String} name the name of the event (data, end, error)
+     * @param {Function} listener the function to call when the event is triggered
+     * @return {GenericWorker} the current object for chainability
+     */
+    on : function (name, listener) {
+        this._listeners[name].push(listener);
+        return this;
+    },
+    /**
+     * Clean any references when a worker is ending.
+     */
+    cleanUp : function () {
+        this.streamInfo = this.generatedError = this.extraStreamInfo = null;
+        this._listeners = [];
+    },
+    /**
+     * Trigger an event. This will call registered callback with the provided arg.
+     * @param {String} name the name of the event (data, end, error)
+     * @param {Object} arg the argument to call the callback with.
+     */
+    emit : function (name, arg) {
+        if (this._listeners[name]) {
+            for(var i = 0; i < this._listeners[name].length; i++) {
+                this._listeners[name][i].call(this, arg);
+            }
+        }
+    },
+    /**
+     * Chain a worker with an other.
+     * @param {Worker} next the worker receiving events from the current one.
+     * @return {worker} the next worker for chainability
+     */
+    pipe : function (next) {
+        return next.registerPrevious(this);
+    },
+    /**
+     * Same as `pipe` in the other direction.
+     * Calling an API with `pipe(next)` is easy for the user.
+     * Implementing the API from the point of view of the next worker, which
+     * registers its source, is easier for us: see the ZipFileWorker.
+     * @param {Worker} previous the previous worker, sending events to this one
+     * @return {Worker} the current worker for chainability
+     */
+    registerPrevious : function (previous) {
+        if (this.isLocked) {
+            throw new Error("The stream '" + this + "' has already been used.");
+        }
+
+        // sharing the streamInfo...
+        this.streamInfo = previous.streamInfo;
+        // ... and adding our own bits
+        this.mergeStreamInfo();
+        this.previous =  previous;
+        var self = this;
+        previous.on("data", function (chunk) {
+            self.processChunk(chunk);
+        });
+        previous.on("end", function () {
+            self.end();
+        });
+        previous.on("error", function (e) {
+            self.error(e);
+        });
+        return this;
+    },
+    /**
+     * Pause the stream so it doesn't send events anymore.
+     * @return {Boolean} true if this call paused the worker, false otherwise.
+     */
+    pause : function () {
+        if(this.isPaused || this.isFinished) {
+            return false;
+        }
+        this.isPaused = true;
+
+        if(this.previous) {
+            this.previous.pause();
+        }
+        return true;
+    },
+    /**
+     * Resume a paused stream.
+     * @return {Boolean} true if this call resumed the worker, false otherwise.
+     */
+    resume : function () {
+        if(!this.isPaused || this.isFinished) {
+            return false;
+        }
+        this.isPaused = false;
+
+        // if true, the worker tried to resume but failed
+        var withError = false;
+        if(this.generatedError) {
+            this.error(this.generatedError);
+            withError = true;
+        }
+        if(this.previous) {
+            this.previous.resume();
+        }
+
+        return !withError;
+    },
+    /**
+     * Flush any remaining bytes as the stream is ending.
+     */
+    flush : function () {},
+    /**
+     * Process a chunk. This is usually the method overridden.
+     * @param {Object} chunk the chunk to process.
+     */
+    processChunk : function(chunk) {
+        this.push(chunk);
+    },
+    /**
+     * Add a key/value to be added in the workers chain streamInfo once activated.
+     * @param {String} key the key to use
+     * @param {Object} value the associated value
+     * @return {Worker} the current worker for chainability
+     */
+    withStreamInfo : function (key, value) {
+        this.extraStreamInfo[key] = value;
+        this.mergeStreamInfo();
+        return this;
+    },
+    /**
+     * Merge this worker's streamInfo into the chain's streamInfo.
+     */
+    mergeStreamInfo : function () {
+        for(var key in this.extraStreamInfo) {
+            if (!Object.prototype.hasOwnProperty.call(this.extraStreamInfo, key)) {
+                continue;
+            }
+            this.streamInfo[key] = this.extraStreamInfo[key];
+        }
+    },
+
+    /**
+     * Lock the stream to prevent further updates on the workers chain.
+     * After calling this method, all calls to pipe will fail.
+     */
+    lock: function () {
+        if (this.isLocked) {
+            throw new Error("The stream '" + this + "' has already been used.");
+        }
+        this.isLocked = true;
+        if (this.previous) {
+            this.previous.lock();
+        }
+    },
+
+    /**
+     * Pretty print the workers chain.
+     */
+    toString : function () {
+        var me = "Worker " + this.name;
+        if (this.previous) {
+            return this.previous + " -> " + me;
+        } else {
+            return me;
+        }
+    }
+};
+
+module.exports = GenericWorker;
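
Writing a new link in the chain usually means subclassing this and overriding processChunk (plus flush when something must be emitted at the end). A sketch of a pass-through worker that counts bytes, using the same inherits helper as the rest of the library:

    var GenericWorker = require("./stream/GenericWorker");
    var utils = require("./utils");

    function ByteCountWorker() {
        GenericWorker.call(this, "ByteCountWorker");
        this.bytes = 0;
    }
    utils.inherits(ByteCountWorker, GenericWorker);

    // Count the chunk, then forward it unchanged to the next worker.
    ByteCountWorker.prototype.processChunk = function (chunk) {
        this.bytes += chunk.data.length;
        this.push(chunk);
    };

    // usage: sourceWorker.pipe(new ByteCountWorker()).pipe(nextWorker);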

+ 214 - 0
libs/jszip/lib/stream/StreamHelper.js

@@ -0,0 +1,214 @@
+"use strict";
+
+var utils = require("../utils");
+var ConvertWorker = require("./ConvertWorker");
+var GenericWorker = require("./GenericWorker");
+var base64 = require("../base64");
+var support = require("../support");
+var external = require("../external");
+
+var NodejsStreamOutputAdapter = null;
+if (support.nodestream) {
+    try {
+        NodejsStreamOutputAdapter = require("../nodejs/NodejsStreamOutputAdapter");
+    } catch(e) {
+        // ignore
+    }
+}
+
+/**
+ * Apply the final transformation of the data. If the user wants a Blob for
+ * example, it's easier to work with a Uint8Array and finally do the
+ * ArrayBuffer/Blob conversion.
+ * @param {String} type the name of the final type
+ * @param {String|Uint8Array|Buffer} content the content to transform
+ * @param {String} mimeType the mime type of the content, if applicable.
+ * @return {String|Uint8Array|ArrayBuffer|Buffer|Blob} the content in the right format.
+ */
+function transformZipOutput(type, content, mimeType) {
+    switch(type) {
+    case "blob" :
+        return utils.newBlob(utils.transformTo("arraybuffer", content), mimeType);
+    case "base64" :
+        return base64.encode(content);
+    default :
+        return utils.transformTo(type, content);
+    }
+}
+
+/**
+ * Concatenate an array of data of the given type.
+ * @param {String} type the type of the data in the given array.
+ * @param {Array} dataArray the array containing the data chunks to concatenate
+ * @return {String|Uint8Array|Buffer} the concatenated data
+ * @throws Error if the asked type is unsupported
+ */
+function concat (type, dataArray) {
+    var i, index = 0, res = null, totalLength = 0;
+    for(i = 0; i < dataArray.length; i++) {
+        totalLength += dataArray[i].length;
+    }
+    switch(type) {
+    case "string":
+        return dataArray.join("");
+    case "array":
+        return Array.prototype.concat.apply([], dataArray);
+    case "uint8array":
+        res = new Uint8Array(totalLength);
+        for(i = 0; i < dataArray.length; i++) {
+            res.set(dataArray[i], index);
+            index += dataArray[i].length;
+        }
+        return res;
+    case "nodebuffer":
+        return Buffer.concat(dataArray);
+    default:
+        throw new Error("concat : unsupported type '"  + type + "'");
+    }
+}
+
+/**
+ * Listen a StreamHelper, accumulate its content and concatenate it into a
+ * complete block.
+ * @param {StreamHelper} helper the helper to use.
+ * @param {Function} updateCallback a callback called on each update. Called
+ * with one arg :
+ * - the metadata linked to the update received.
+ * @return Promise the promise for the accumulation.
+ */
+function accumulate(helper, updateCallback) {
+    return new external.Promise(function (resolve, reject){
+        var dataArray = [];
+        var chunkType = helper._internalType,
+            resultType = helper._outputType,
+            mimeType = helper._mimeType;
+        helper
+            .on("data", function (data, meta) {
+                dataArray.push(data);
+                if(updateCallback) {
+                    updateCallback(meta);
+                }
+            })
+            .on("error", function(err) {
+                dataArray = [];
+                reject(err);
+            })
+            .on("end", function (){
+                try {
+                    var result = transformZipOutput(resultType, concat(chunkType, dataArray), mimeType);
+                    resolve(result);
+                } catch (e) {
+                    reject(e);
+                }
+                dataArray = [];
+            })
+            .resume();
+    });
+}
+
+/**
+ * A helper to easily use workers outside of JSZip.
+ * @constructor
+ * @param {Worker} worker the worker to wrap
+ * @param {String} outputType the type of data expected by the user
+ * @param {String} mimeType the mime type of the content, if applicable.
+ */
+function StreamHelper(worker, outputType, mimeType) {
+    var internalType = outputType;
+    switch(outputType) {
+    case "blob":
+    case "arraybuffer":
+        internalType = "uint8array";
+        break;
+    case "base64":
+        internalType = "string";
+        break;
+    }
+
+    try {
+        // the type used internally
+        this._internalType = internalType;
+        // the type used to output results
+        this._outputType = outputType;
+        // the mime type
+        this._mimeType = mimeType;
+        utils.checkSupport(internalType);
+        this._worker = worker.pipe(new ConvertWorker(internalType));
+        // the last workers can be rewired without issues but we need to
+        // prevent any updates on previous workers.
+        worker.lock();
+    } catch(e) {
+        this._worker = new GenericWorker("error");
+        this._worker.error(e);
+    }
+}
+
+StreamHelper.prototype = {
+    /**
+     * Listen to a StreamHelper, accumulate its content and concatenate it into a
+     * complete block.
+     * @param {Function} updateCb the update callback.
+     * @return Promise the promise for the accumulation.
+     */
+    accumulate : function (updateCb) {
+        return accumulate(this, updateCb);
+    },
+    /**
+     * Add a listener on an event triggered on a stream.
+     * @param {String} evt the name of the event
+     * @param {Function} fn the listener
+     * @return {StreamHelper} the current helper.
+     */
+    on : function (evt, fn) {
+        var self = this;
+
+        if(evt === "data") {
+            this._worker.on(evt, function (chunk) {
+                fn.call(self, chunk.data, chunk.meta);
+            });
+        } else {
+            this._worker.on(evt, function () {
+                utils.delay(fn, arguments, self);
+            });
+        }
+        return this;
+    },
+    /**
+     * Resume the flow of chunks.
+     * @return {StreamHelper} the current helper.
+     */
+    resume : function () {
+        utils.delay(this._worker.resume, [], this._worker);
+        return this;
+    },
+    /**
+     * Pause the flow of chunks.
+     * @return {StreamHelper} the current helper.
+     */
+    pause : function () {
+        this._worker.pause();
+        return this;
+    },
+    /**
+     * Return a nodejs stream for this helper.
+     * @param {Function} updateCb the update callback.
+     * @return {NodejsStreamOutputAdapter} the nodejs stream.
+     */
+    toNodejsStream : function (updateCb) {
+        utils.checkSupport("nodestream");
+        if (this._outputType !== "nodebuffer") {
+            // an object stream containing blob/arraybuffer/uint8array/string
+            // is strange and I don't know if it would be useful.
+            // If you find this comment and have a good use case, please open a
+            // bug report !
+            throw new Error(this._outputType + " is not supported by this method");
+        }
+
+        return new NodejsStreamOutputAdapter(this, {
+            objectMode : this._outputType !== "nodebuffer"
+        }, updateCb);
+    }
+};
+
+
+module.exports = StreamHelper;
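
StreamHelper is what generateInternalStream() returns, so accumulate() above is exactly the code path behind generateAsync(). A usage sketch, assuming the usual JSZip entry point (file name and content are illustrative):

    var zip = new JSZip();
    zip.file("hello.txt", "Hello World\n");

    zip.generateInternalStream({ type: "uint8array" })
        .accumulate(function (meta) {
            // progress metadata for each chunk (e.g. meta.percent)
        })
        .then(function (bytes) {
            // bytes is a Uint8Array containing the whole zip
        });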

+ 38 - 0
libs/jszip/lib/support.js

@@ -0,0 +1,38 @@
+"use strict";
+
+exports.base64 = true;
+exports.array = true;
+exports.string = true;
+exports.arraybuffer = typeof ArrayBuffer !== "undefined" && typeof Uint8Array !== "undefined";
+exports.nodebuffer = typeof Buffer !== "undefined";
+// contains true if JSZip can read/generate Uint8Array, false otherwise.
+exports.uint8array = typeof Uint8Array !== "undefined";
+
+if (typeof ArrayBuffer === "undefined") {
+    exports.blob = false;
+}
+else {
+    var buffer = new ArrayBuffer(0);
+    try {
+        exports.blob = new Blob([buffer], {
+            type: "application/zip"
+        }).size === 0;
+    }
+    catch (e) {
+        try {
+            var Builder = self.BlobBuilder || self.WebKitBlobBuilder || self.MozBlobBuilder || self.MSBlobBuilder;
+            var builder = new Builder();
+            builder.append(buffer);
+            exports.blob = builder.getBlob("application/zip").size === 0;
+        }
+        catch (e) {
+            exports.blob = false;
+        }
+    }
+}
+
+try {
+    exports.nodestream = !!require("readable-stream").Readable;
+} catch(e) {
+    exports.nodestream = false;
+}

+ 275 - 0
libs/jszip/lib/utf8.js

@@ -0,0 +1,275 @@
+"use strict";
+
+var utils = require("./utils");
+var support = require("./support");
+var nodejsUtils = require("./nodejsUtils");
+var GenericWorker = require("./stream/GenericWorker");
+
+/**
+ * The following functions come from pako, from pako/lib/utils/strings
+ * released under the MIT license, see pako https://github.com/nodeca/pako/
+ */
+
+// Table with utf8 lengths (calculated by first byte of sequence)
+// Note, that 5 & 6-byte values and some 4-byte values can not be represented in JS,
+// because max possible codepoint is 0x10ffff
+var _utf8len = new Array(256);
+for (var i=0; i<256; i++) {
+    _utf8len[i] = (i >= 252 ? 6 : i >= 248 ? 5 : i >= 240 ? 4 : i >= 224 ? 3 : i >= 192 ? 2 : 1);
+}
+_utf8len[254]=_utf8len[254]=1; // Invalid sequence start
+
+// convert string to array (typed, when possible)
+var string2buf = function (str) {
+    var buf, c, c2, m_pos, i, str_len = str.length, buf_len = 0;
+
+    // count binary size
+    for (m_pos = 0; m_pos < str_len; m_pos++) {
+        c = str.charCodeAt(m_pos);
+        if ((c & 0xfc00) === 0xd800 && (m_pos+1 < str_len)) {
+            c2 = str.charCodeAt(m_pos+1);
+            if ((c2 & 0xfc00) === 0xdc00) {
+                c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00);
+                m_pos++;
+            }
+        }
+        buf_len += c < 0x80 ? 1 : c < 0x800 ? 2 : c < 0x10000 ? 3 : 4;
+    }
+
+    // allocate buffer
+    if (support.uint8array) {
+        buf = new Uint8Array(buf_len);
+    } else {
+        buf = new Array(buf_len);
+    }
+
+    // convert
+    for (i=0, m_pos = 0; i < buf_len; m_pos++) {
+        c = str.charCodeAt(m_pos);
+        if ((c & 0xfc00) === 0xd800 && (m_pos+1 < str_len)) {
+            c2 = str.charCodeAt(m_pos+1);
+            if ((c2 & 0xfc00) === 0xdc00) {
+                c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00);
+                m_pos++;
+            }
+        }
+        if (c < 0x80) {
+            /* one byte */
+            buf[i++] = c;
+        } else if (c < 0x800) {
+            /* two bytes */
+            buf[i++] = 0xC0 | (c >>> 6);
+            buf[i++] = 0x80 | (c & 0x3f);
+        } else if (c < 0x10000) {
+            /* three bytes */
+            buf[i++] = 0xE0 | (c >>> 12);
+            buf[i++] = 0x80 | (c >>> 6 & 0x3f);
+            buf[i++] = 0x80 | (c & 0x3f);
+        } else {
+            /* four bytes */
+            buf[i++] = 0xf0 | (c >>> 18);
+            buf[i++] = 0x80 | (c >>> 12 & 0x3f);
+            buf[i++] = 0x80 | (c >>> 6 & 0x3f);
+            buf[i++] = 0x80 | (c & 0x3f);
+        }
+    }
+
+    return buf;
+};
+
+// Calculate max possible position in utf8 buffer,
+// that will not break sequence. If that's not possible
+// - (very small limits) return max size as is.
+//
+// buf[] - utf8 bytes array
+// max   - length limit (mandatory);
+var utf8border = function(buf, max) {
+    var pos;
+
+    max = max || buf.length;
+    if (max > buf.length) { max = buf.length; }
+
+    // go back from last position, until start of sequence found
+    pos = max-1;
+    while (pos >= 0 && (buf[pos] & 0xC0) === 0x80) { pos--; }
+
+    // Degenerate case - very small or broken sequence,
+    // return max, because we should return something anyway.
+    if (pos < 0) { return max; }
+
+    // If we came to start of buffer - that means buffer is too small,
+    // return max too.
+    if (pos === 0) { return max; }
+
+    return (pos + _utf8len[buf[pos]] > max) ? pos : max;
+};
+
+// convert array to string
+var buf2string = function (buf) {
+    var i, out, c, c_len;
+    var len = buf.length;
+
+    // Reserve max possible length (2 words per char)
+    // NB: for unknown reasons, Array is significantly faster for
+    //     String.fromCharCode.apply than Uint16Array.
+    var utf16buf = new Array(len*2);
+
+    for (out=0, i=0; i<len;) {
+        c = buf[i++];
+        // quick process ascii
+        if (c < 0x80) { utf16buf[out++] = c; continue; }
+
+        c_len = _utf8len[c];
+        // skip 5 & 6 byte codes
+        if (c_len > 4) { utf16buf[out++] = 0xfffd; i += c_len-1; continue; }
+
+        // apply mask on first byte
+        c &= c_len === 2 ? 0x1f : c_len === 3 ? 0x0f : 0x07;
+        // join the rest
+        while (c_len > 1 && i < len) {
+            c = (c << 6) | (buf[i++] & 0x3f);
+            c_len--;
+        }
+
+        // terminated by end of string?
+        if (c_len > 1) { utf16buf[out++] = 0xfffd; continue; }
+
+        if (c < 0x10000) {
+            utf16buf[out++] = c;
+        } else {
+            c -= 0x10000;
+            utf16buf[out++] = 0xd800 | ((c >> 10) & 0x3ff);
+            utf16buf[out++] = 0xdc00 | (c & 0x3ff);
+        }
+    }
+
+    // shrinkBuf(utf16buf, out)
+    if (utf16buf.length !== out) {
+        if(utf16buf.subarray) {
+            utf16buf = utf16buf.subarray(0, out);
+        } else {
+            utf16buf.length = out;
+        }
+    }
+
+    // return String.fromCharCode.apply(null, utf16buf);
+    return utils.applyFromCharCode(utf16buf);
+};
+
+
+// That's all for the pako functions.
+
+
+/**
+ * Transform a javascript string into an array (typed if possible) of bytes,
+ * UTF-8 encoded.
+ * @param {String} str the string to encode
+ * @return {Array|Uint8Array|Buffer} the UTF-8 encoded string.
+ */
+exports.utf8encode = function utf8encode(str) {
+    if (support.nodebuffer) {
+        return nodejsUtils.newBufferFrom(str, "utf-8");
+    }
+
+    return string2buf(str);
+};
+
+
+/**
+ * Transform a byte array (or a representation of one) holding a UTF-8 encoded
+ * string into a javascript string.
+ * @param {Array|Uint8Array|Buffer} buf the data to decode
+ * @return {String} the decoded string.
+ */
+exports.utf8decode = function utf8decode(buf) {
+    if (support.nodebuffer) {
+        return utils.transformTo("nodebuffer", buf).toString("utf-8");
+    }
+
+    buf = utils.transformTo(support.uint8array ? "uint8array" : "array", buf);
+
+    return buf2string(buf);
+};
+
+/**
+ * A worker to decode utf8 encoded binary chunks into string chunks.
+ * @constructor
+ */
+function Utf8DecodeWorker() {
+    GenericWorker.call(this, "utf-8 decode");
+    // the last bytes if a chunk didn't end with a complete codepoint.
+    this.leftOver = null;
+}
+utils.inherits(Utf8DecodeWorker, GenericWorker);
+
+/**
+ * @see GenericWorker.processChunk
+ */
+Utf8DecodeWorker.prototype.processChunk = function (chunk) {
+
+    var data = utils.transformTo(support.uint8array ? "uint8array" : "array", chunk.data);
+
+    // 1st step, re-use what's left of the previous chunk
+    if (this.leftOver && this.leftOver.length) {
+        if(support.uint8array) {
+            var previousData = data;
+            data = new Uint8Array(previousData.length + this.leftOver.length);
+            data.set(this.leftOver, 0);
+            data.set(previousData, this.leftOver.length);
+        } else {
+            data = this.leftOver.concat(data);
+        }
+        this.leftOver = null;
+    }
+
+    var nextBoundary = utf8border(data);
+    var usableData = data;
+    if (nextBoundary !== data.length) {
+        if (support.uint8array) {
+            usableData = data.subarray(0, nextBoundary);
+            this.leftOver = data.subarray(nextBoundary, data.length);
+        } else {
+            usableData = data.slice(0, nextBoundary);
+            this.leftOver = data.slice(nextBoundary, data.length);
+        }
+    }
+
+    this.push({
+        data : exports.utf8decode(usableData),
+        meta : chunk.meta
+    });
+};
+
+/**
+ * @see GenericWorker.flush
+ */
+Utf8DecodeWorker.prototype.flush = function () {
+    if(this.leftOver && this.leftOver.length) {
+        this.push({
+            data : exports.utf8decode(this.leftOver),
+            meta : {}
+        });
+        this.leftOver = null;
+    }
+};
+exports.Utf8DecodeWorker = Utf8DecodeWorker;
+
+/**
+ * A worker to encode string chunks into utf8 encoded binary chunks.
+ * @constructor
+ */
+function Utf8EncodeWorker() {
+    GenericWorker.call(this, "utf-8 encode");
+}
+utils.inherits(Utf8EncodeWorker, GenericWorker);
+
+/**
+ * @see GenericWorker.processChunk
+ */
+Utf8EncodeWorker.prototype.processChunk = function (chunk) {
+    this.push({
+        data : exports.utf8encode(chunk.data),
+        meta : chunk.meta
+    });
+};
+exports.Utf8EncodeWorker = Utf8EncodeWorker;
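
A worked sketch of the round trip and of the chunk-boundary problem the Utf8DecodeWorker solves ("é" is U+00E9, which UTF-8 encodes as the two bytes 0xC3 0xA9):

    var utf8 = require("./utf8");

    var bytes = utf8.utf8encode("é"); // two bytes, 0xC3 0xA9 (Buffer or Uint8Array)
    utf8.utf8decode(bytes);           // "é"

    // If a chunk boundary falls between 0xC3 and 0xA9, Utf8DecodeWorker keeps the
    // dangling 0xC3 in this.leftOver and prepends it to the next chunk, so a
    // multi-byte character is never split across decoded string chunks.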

+ 501 - 0
libs/jszip/lib/utils.js

@@ -0,0 +1,501 @@
+"use strict";
+
+var support = require("./support");
+var base64 = require("./base64");
+var nodejsUtils = require("./nodejsUtils");
+var external = require("./external");
+require("setimmediate");
+
+
+/**
+ * Convert a string that passes as a "binary string": it should represent a byte
+ * array but may contain char codes > 255. Keep only the lowest byte of each
+ * char code and return the resulting byte array.
+ * @param {String} str the string to transform.
+ * @return {Array|Uint8Array} the string in a binary format.
+ */
+function string2binary(str) {
+    var result = null;
+    if (support.uint8array) {
+        result = new Uint8Array(str.length);
+    } else {
+        result = new Array(str.length);
+    }
+    return stringToArrayLike(str, result);
+}
+
+/**
+ * Create a new blob with the given content and the given type.
+ * @param {String|ArrayBuffer} part the content to put in the blob. DO NOT use
+ * an Uint8Array because the stock browser of android 4 won't accept it (it
+ * will be silently converted to a string, "[object Uint8Array]").
+ *
+ * Use only ONE part to build the blob to avoid a memory leak in IE11 / Edge:
+ * when a large amount of Array is used to create the Blob, the amount of
+ * memory consumed is nearly 100 times the original data amount.
+ *
+ * @param {String} type the mime type of the blob.
+ * @return {Blob} the created blob.
+ */
+exports.newBlob = function(part, type) {
+    exports.checkSupport("blob");
+
+    try {
+        // Blob constructor
+        return new Blob([part], {
+            type: type
+        });
+    }
+    catch (e) {
+
+        try {
+            // deprecated, browser only, old way
+            var Builder = self.BlobBuilder || self.WebKitBlobBuilder || self.MozBlobBuilder || self.MSBlobBuilder;
+            var builder = new Builder();
+            builder.append(part);
+            return builder.getBlob(type);
+        }
+        catch (e) {
+
+            // no way left to build a Blob in this environment
+            throw new Error("Bug : can't construct the Blob.");
+        }
+    }
+
+
+};
+/**
+ * The identity function.
+ * @param {Object} input the input.
+ * @return {Object} the same input.
+ */
+function identity(input) {
+    return input;
+}
+
+/**
+ * Fill in an array with a string.
+ * @param {String} str the string to use.
+ * @param {Array|ArrayBuffer|Uint8Array|Buffer} array the array to fill in (will be mutated).
+ * @return {Array|ArrayBuffer|Uint8Array|Buffer} the updated array.
+ */
+function stringToArrayLike(str, array) {
+    for (var i = 0; i < str.length; ++i) {
+        array[i] = str.charCodeAt(i) & 0xFF;
+    }
+    return array;
+}
+
+/**
+ * A helper for the function arrayLikeToString.
+ * This contains static information and functions that
+ * can be optimized by the browser JIT compiler.
+ */
+var arrayToStringHelper = {
+    /**
+     * Transform an array of int into a string, chunk by chunk.
+     * See the performances notes on arrayLikeToString.
+     * @param {Array|ArrayBuffer|Uint8Array|Buffer} array the array to transform.
+     * @param {String} type the type of the array.
+     * @param {Integer} chunk the chunk size.
+     * @return {String} the resulting string.
+     * @throws Error if the chunk is too big for the stack.
+     */
+    stringifyByChunk: function(array, type, chunk) {
+        var result = [], k = 0, len = array.length;
+        // shortcut
+        if (len <= chunk) {
+            return String.fromCharCode.apply(null, array);
+        }
+        while (k < len) {
+            if (type === "array" || type === "nodebuffer") {
+                result.push(String.fromCharCode.apply(null, array.slice(k, Math.min(k + chunk, len))));
+            }
+            else {
+                result.push(String.fromCharCode.apply(null, array.subarray(k, Math.min(k + chunk, len))));
+            }
+            k += chunk;
+        }
+        return result.join("");
+    },
+    /**
+     * Call String.fromCharCode on every item in the array.
+     * This is the naive implementation, which generates A LOT of intermediate strings.
+     * It should only be used when everything else fails.
+     * @param {Array|ArrayBuffer|Uint8Array|Buffer} array the array to transform.
+     * @return {String} the result.
+     */
+    stringifyByChar: function(array){
+        var resultStr = "";
+        for(var i = 0; i < array.length; i++) {
+            resultStr += String.fromCharCode(array[i]);
+        }
+        return resultStr;
+    },
+    applyCanBeUsed : {
+        /**
+         * true if the browser can apply String.fromCharCode to a Uint8Array
+         */
+        uint8array : (function () {
+            try {
+                return support.uint8array && String.fromCharCode.apply(null, new Uint8Array(1)).length === 1;
+            } catch (e) {
+                return false;
+            }
+        })(),
+        /**
+         * true if the browser can apply String.fromCharCode to a nodejs Buffer.
+         */
+        nodebuffer : (function () {
+            try {
+                return support.nodebuffer && String.fromCharCode.apply(null, nodejsUtils.allocBuffer(1)).length === 1;
+            } catch (e) {
+                return false;
+            }
+        })()
+    }
+};
+
+/**
+ * Transform an array-like object to a string.
+ * @param {Array|ArrayBuffer|Uint8Array|Buffer} array the array to transform.
+ * @return {String} the result.
+ */
+function arrayLikeToString(array) {
+    // Performance notes :
+    // -------------------
+    // String.fromCharCode.apply(null, array) is the fastest, see
+    // http://jsperf.com/converting-a-uint8array-to-a-string/2
+    // but the stack is limited (and we can get huge arrays !).
+    //
+    // result += String.fromCharCode(array[i]); generates too many strings !
+    //
+    // This code is inspired by http://jsperf.com/arraybuffer-to-string-apply-performance/2
+    // TODO : we now have workers that split the work. Do we still need that ?
+    var chunk = 65536,
+        type = exports.getTypeOf(array),
+        canUseApply = true;
+    if (type === "uint8array") {
+        canUseApply = arrayToStringHelper.applyCanBeUsed.uint8array;
+    } else if (type === "nodebuffer") {
+        canUseApply = arrayToStringHelper.applyCanBeUsed.nodebuffer;
+    }
+
+    if (canUseApply) {
+        while (chunk > 1) {
+            try {
+                return arrayToStringHelper.stringifyByChunk(array, type, chunk);
+            } catch (e) {
+                chunk = Math.floor(chunk / 2);
+            }
+        }
+    }
+
+    // no apply or chunk error : slow and painful algorithm
+    // default browser on android 4.*
+    return arrayToStringHelper.stringifyByChar(array);
+}
+
+exports.applyFromCharCode = arrayLikeToString;
+
+
+/**
+ * Copy the data from an array-like to another array-like.
+ * @param {Array|ArrayBuffer|Uint8Array|Buffer} arrayFrom the origin array.
+ * @param {Array|ArrayBuffer|Uint8Array|Buffer} arrayTo the destination array which will be mutated.
+ * @return {Array|ArrayBuffer|Uint8Array|Buffer} the updated destination array.
+ */
+function arrayLikeToArrayLike(arrayFrom, arrayTo) {
+    for (var i = 0; i < arrayFrom.length; i++) {
+        arrayTo[i] = arrayFrom[i];
+    }
+    return arrayTo;
+}
+
+// a matrix containing functions to transform everything into everything.
+var transform = {};
+
+// string to ?
+transform["string"] = {
+    "string": identity,
+    "array": function(input) {
+        return stringToArrayLike(input, new Array(input.length));
+    },
+    "arraybuffer": function(input) {
+        return transform["string"]["uint8array"](input).buffer;
+    },
+    "uint8array": function(input) {
+        return stringToArrayLike(input, new Uint8Array(input.length));
+    },
+    "nodebuffer": function(input) {
+        return stringToArrayLike(input, nodejsUtils.allocBuffer(input.length));
+    }
+};
+
+// array to ?
+transform["array"] = {
+    "string": arrayLikeToString,
+    "array": identity,
+    "arraybuffer": function(input) {
+        return (new Uint8Array(input)).buffer;
+    },
+    "uint8array": function(input) {
+        return new Uint8Array(input);
+    },
+    "nodebuffer": function(input) {
+        return nodejsUtils.newBufferFrom(input);
+    }
+};
+
+// arraybuffer to ?
+transform["arraybuffer"] = {
+    "string": function(input) {
+        return arrayLikeToString(new Uint8Array(input));
+    },
+    "array": function(input) {
+        return arrayLikeToArrayLike(new Uint8Array(input), new Array(input.byteLength));
+    },
+    "arraybuffer": identity,
+    "uint8array": function(input) {
+        return new Uint8Array(input);
+    },
+    "nodebuffer": function(input) {
+        return nodejsUtils.newBufferFrom(new Uint8Array(input));
+    }
+};
+
+// uint8array to ?
+transform["uint8array"] = {
+    "string": arrayLikeToString,
+    "array": function(input) {
+        return arrayLikeToArrayLike(input, new Array(input.length));
+    },
+    "arraybuffer": function(input) {
+        return input.buffer;
+    },
+    "uint8array": identity,
+    "nodebuffer": function(input) {
+        return nodejsUtils.newBufferFrom(input);
+    }
+};
+
+// nodebuffer to ?
+transform["nodebuffer"] = {
+    "string": arrayLikeToString,
+    "array": function(input) {
+        return arrayLikeToArrayLike(input, new Array(input.length));
+    },
+    "arraybuffer": function(input) {
+        return transform["nodebuffer"]["uint8array"](input).buffer;
+    },
+    "uint8array": function(input) {
+        return arrayLikeToArrayLike(input, new Uint8Array(input.length));
+    },
+    "nodebuffer": identity
+};
+
+/**
+ * Transform an input into any type.
+ * The supported output types are : string, array, uint8array, arraybuffer, nodebuffer.
+ * If no output type is specified, the unmodified input will be returned.
+ * @param {String} outputType the output type.
+ * @param {String|Array|ArrayBuffer|Uint8Array|Buffer} input the input to convert.
+ * @return {String|Array|ArrayBuffer|Uint8Array|Buffer} the converted input.
+ * @throws {Error} an Error if the browser doesn't support the requested output type.
+ */
+exports.transformTo = function(outputType, input) {
+    if (!input) {
+        // undefined, null, etc
+        // an empty string won't harm.
+        input = "";
+    }
+    if (!outputType) {
+        return input;
+    }
+    exports.checkSupport(outputType);
+    var inputType = exports.getTypeOf(input);
+    var result = transform[inputType][outputType](input);
+    return result;
+};
+
+/**
+ * Resolve all relative path components, "." and "..", in a path. If these relative components
+ * traverse above the root then the resulting path will only contain the final path component.
+ *
+ * All empty components, e.g. "//", are removed.
+ * @param {string} path A path with / or \ separators
+ * @returns {string} The path with all relative path components resolved.
+ */
+exports.resolve = function(path) {
+    var parts = path.split("/");
+    var result = [];
+    for (var index = 0; index < parts.length; index++) {
+        var part = parts[index];
+        // Allow the first and last component to be empty for trailing slashes.
+        if (part === "." || (part === "" && index !== 0 && index !== parts.length - 1)) {
+            continue;
+        } else if (part === "..") {
+            result.pop();
+        } else {
+            result.push(part);
+        }
+    }
+    return result.join("/");
+};
+
+/**
+ * Return the type of the input.
+ * The type will be in a format valid for JSZip.utils.transformTo : string, array, uint8array, arraybuffer, nodebuffer.
+ * @param {Object} input the input to identify.
+ * @return {String} the (lowercase) type of the input.
+ */
+exports.getTypeOf = function(input) {
+    if (typeof input === "string") {
+        return "string";
+    }
+    if (Object.prototype.toString.call(input) === "[object Array]") {
+        return "array";
+    }
+    if (support.nodebuffer && nodejsUtils.isBuffer(input)) {
+        return "nodebuffer";
+    }
+    if (support.uint8array && input instanceof Uint8Array) {
+        return "uint8array";
+    }
+    if (support.arraybuffer && input instanceof ArrayBuffer) {
+        return "arraybuffer";
+    }
+};
+
+/**
+ * Throw an exception if the type is not supported.
+ * @param {String} type the type to check.
+ * @throws {Error} an Error if the browser doesn't support the requested type.
+ */
+exports.checkSupport = function(type) {
+    var supported = support[type.toLowerCase()];
+    if (!supported) {
+        throw new Error(type + " is not supported by this platform");
+    }
+};
+
+exports.MAX_VALUE_16BITS = 65535;
+exports.MAX_VALUE_32BITS = -1; // "\xFF\xFF\xFF\xFF" read as a signed 32-bit integer is -1
+
+/**
+ * Prettify a string read as binary.
+ * @param {string} str the string to prettify.
+ * @return {string} a pretty string.
+ */
+exports.pretty = function(str) {
+    var res = "",
+        code, i;
+    for (i = 0; i < (str || "").length; i++) {
+        code = str.charCodeAt(i);
+        res += "\\x" + (code < 16 ? "0" : "") + code.toString(16).toUpperCase();
+    }
+    return res;
+};
+
+/**
+ * Defer the call of a function.
+ * @param {Function} callback the function to call asynchronously.
+ * @param {Array} args the arguments to give to the callback.
+ * @param {Object} self the value of "this" when calling the callback.
+ */
+exports.delay = function(callback, args, self) {
+    setImmediate(function () {
+        callback.apply(self || null, args || []);
+    });
+};
+
+/**
+ * Extend a prototype with another one, without calling a constructor that
+ * has side effects. Inspired by nodejs' `utils.inherits`.
+ * @param {Function} ctor the constructor to augment
+ * @param {Function} superCtor the parent constructor to use
+ */
+exports.inherits = function (ctor, superCtor) {
+    var Obj = function() {};
+    Obj.prototype = superCtor.prototype;
+    ctor.prototype = new Obj();
+};
+
+/**
+ * Merge the objects passed as parameters into a new one.
+ * @private
+ * @param {...Object} var_args All objects to merge.
+ * @return {Object} a new object with the data of the others.
+ */
+exports.extend = function() {
+    var result = {}, i, attr;
+    for (i = 0; i < arguments.length; i++) { // arguments is not enumerable in some browsers
+        for (attr in arguments[i]) {
+            if (Object.prototype.hasOwnProperty.call(arguments[i], attr) && typeof result[attr] === "undefined") {
+                result[attr] = arguments[i][attr];
+            }
+        }
+    }
+    return result;
+};
+
+/**
+ * Transform arbitrary content into a Promise.
+ * @param {String} name a name for the content being processed.
+ * @param {Object} inputData the content to process.
+ * @param {Boolean} isBinary true if the content is not a unicode string
+ * @param {Boolean} isOptimizedBinaryString true if the string content only has one byte per character.
+ * @param {Boolean} isBase64 true if the string content is encoded with base64.
+ * @return {Promise} a promise in a format usable by JSZip.
+ */
+exports.prepareContent = function(name, inputData, isBinary, isOptimizedBinaryString, isBase64) {
+
+    // if inputData is already a promise, this flattens it.
+    var promise = external.Promise.resolve(inputData).then(function(data) {
+
+
+        var isBlob = support.blob && (data instanceof Blob || ["[object File]", "[object Blob]"].indexOf(Object.prototype.toString.call(data)) !== -1);
+
+        if (isBlob && typeof FileReader !== "undefined") {
+            return new external.Promise(function (resolve, reject) {
+                var reader = new FileReader();
+
+                reader.onload = function(e) {
+                    resolve(e.target.result);
+                };
+                reader.onerror = function(e) {
+                    reject(e.target.error);
+                };
+                reader.readAsArrayBuffer(data);
+            });
+        } else {
+            return data;
+        }
+    });
+
+    return promise.then(function(data) {
+        var dataType = exports.getTypeOf(data);
+
+        if (!dataType) {
+            return external.Promise.reject(
+                new Error("Can't read the data of '" + name + "'. Is it " +
+                          "in a supported JavaScript type (String, Blob, ArrayBuffer, etc) ?")
+            );
+        }
+        // special case : it's way easier to work with Uint8Array than with ArrayBuffer
+        if (dataType === "arraybuffer") {
+            data = exports.transformTo("uint8array", data);
+        } else if (dataType === "string") {
+            if (isBase64) {
+                data = base64.decode(data);
+            }
+            else if (isBinary) {
+                // optimizedBinaryString === true means that the file has already been filtered with a 0xFF mask
+                if (isOptimizedBinaryString !== true) {
+                    // this is a string, not in a base64 format.
+                    // Be sure that this is a correct "binary string"
+                    data = string2binary(data);
+                }
+            }
+        }
+        return data;
+    });
+};
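
A brief usage sketch of the helpers above (the require path assumes this vendored layout and is illustrative):

    var utils = require("./libs/jszip/lib/utils");

    // transformTo converts between the binary representations JSZip works with
    var u8  = utils.transformTo("uint8array", "abc"); // Uint8Array [97, 98, 99]
    var str = utils.transformTo("string", u8);        // "abc"

    console.log(utils.getTypeOf(u8));                 // "uint8array"
    console.log(utils.resolve("a/b/../c//d/"));       // "a/c/d/"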

+ 261 - 0
libs/jszip/lib/zipEntries.js

@@ -0,0 +1,261 @@
+"use strict";
+var readerFor = require("./reader/readerFor");
+var utils = require("./utils");
+var sig = require("./signature");
+var ZipEntry = require("./zipEntry");
+var support = require("./support");
+//  class ZipEntries {{{
+/**
+ * All the entries in the zip file.
+ * @constructor
+ * @param {Object} loadOptions Options for loading the stream.
+ */
+function ZipEntries(loadOptions) {
+    this.files = [];
+    this.loadOptions = loadOptions;
+}
+ZipEntries.prototype = {
+    /**
+     * Check that the reader is on the specified signature.
+     * @param {string} expectedSignature the expected signature.
+     * @throws {Error} if it is another signature.
+     */
+    checkSignature: function(expectedSignature) {
+        if (!this.reader.readAndCheckSignature(expectedSignature)) {
+            this.reader.index -= 4;
+            var signature = this.reader.readString(4);
+            throw new Error("Corrupted zip or bug: unexpected signature " + "(" + utils.pretty(signature) + ", expected " + utils.pretty(expectedSignature) + ")");
+        }
+    },
+    /**
+     * Check if the given signature is at the given index.
+     * @param {number} askedIndex the index to check.
+     * @param {string} expectedSignature the signature to expect.
+     * @return {boolean} true if the signature is here, false otherwise.
+     */
+    isSignature: function(askedIndex, expectedSignature) {
+        var currentIndex = this.reader.index;
+        this.reader.setIndex(askedIndex);
+        var signature = this.reader.readString(4);
+        var result = signature === expectedSignature;
+        this.reader.setIndex(currentIndex);
+        return result;
+    },
+    /**
+     * Read the end of the central directory.
+     */
+    readBlockEndOfCentral: function() {
+        this.diskNumber = this.reader.readInt(2);
+        this.diskWithCentralDirStart = this.reader.readInt(2);
+        this.centralDirRecordsOnThisDisk = this.reader.readInt(2);
+        this.centralDirRecords = this.reader.readInt(2);
+        this.centralDirSize = this.reader.readInt(4);
+        this.centralDirOffset = this.reader.readInt(4);
+
+        this.zipCommentLength = this.reader.readInt(2);
+        // warning : the encoding depends on the system locale
+        // On a linux machine with LANG=en_US.utf8, this field is utf8 encoded.
+        // On a windows machine, this field is encoded with the localized windows code page.
+        var zipComment = this.reader.readData(this.zipCommentLength);
+        var decodeParamType = support.uint8array ? "uint8array" : "array";
+        // To get consistent behavior with the generation part, we will assume that
+        // this is utf8 encoded unless specified otherwise.
+        var decodeContent = utils.transformTo(decodeParamType, zipComment);
+        this.zipComment = this.loadOptions.decodeFileName(decodeContent);
+    },
+    /**
+     * Read the end of the Zip 64 central directory.
+     * Not merged with the method readEndOfCentral :
+     * the regular end of central directory record can coexist with its Zip64
+     * counterpart, and we don't want to read the wrong number of bytes !
+     */
+    readBlockZip64EndOfCentral: function() {
+        this.zip64EndOfCentralSize = this.reader.readInt(8);
+        this.reader.skip(4);
+        // this.versionMadeBy = this.reader.readString(2);
+        // this.versionNeeded = this.reader.readInt(2);
+        this.diskNumber = this.reader.readInt(4);
+        this.diskWithCentralDirStart = this.reader.readInt(4);
+        this.centralDirRecordsOnThisDisk = this.reader.readInt(8);
+        this.centralDirRecords = this.reader.readInt(8);
+        this.centralDirSize = this.reader.readInt(8);
+        this.centralDirOffset = this.reader.readInt(8);
+
+        this.zip64ExtensibleData = {};
+        var extraDataSize = this.zip64EndOfCentralSize - 44,
+            index = 0,
+            extraFieldId,
+            extraFieldLength,
+            extraFieldValue;
+        while (index < extraDataSize) {
+            extraFieldId = this.reader.readInt(2);
+            extraFieldLength = this.reader.readInt(4);
+            extraFieldValue = this.reader.readData(extraFieldLength);
+            this.zip64ExtensibleData[extraFieldId] = {
+                id: extraFieldId,
+                length: extraFieldLength,
+                value: extraFieldValue
+            };
+        }
+    },
+    /**
+     * Read the end of the Zip 64 central directory locator.
+     */
+    readBlockZip64EndOfCentralLocator: function() {
+        this.diskWithZip64CentralDirStart = this.reader.readInt(4);
+        this.relativeOffsetEndOfZip64CentralDir = this.reader.readInt(8);
+        this.disksCount = this.reader.readInt(4);
+        if (this.disksCount > 1) {
+            throw new Error("Multi-volumes zip are not supported");
+        }
+    },
+    /**
+     * Read the local files, based on the offset read in the central part.
+     */
+    readLocalFiles: function() {
+        var i, file;
+        for (i = 0; i < this.files.length; i++) {
+            file = this.files[i];
+            this.reader.setIndex(file.localHeaderOffset);
+            this.checkSignature(sig.LOCAL_FILE_HEADER);
+            file.readLocalPart(this.reader);
+            file.handleUTF8();
+            file.processAttributes();
+        }
+    },
+    /**
+     * Read the central directory.
+     */
+    readCentralDir: function() {
+        var file;
+
+        this.reader.setIndex(this.centralDirOffset);
+        while (this.reader.readAndCheckSignature(sig.CENTRAL_FILE_HEADER)) {
+            file = new ZipEntry({
+                zip64: this.zip64
+            }, this.loadOptions);
+            file.readCentralPart(this.reader);
+            this.files.push(file);
+        }
+
+        if (this.centralDirRecords !== this.files.length) {
+            if (this.centralDirRecords !== 0 && this.files.length === 0) {
+                // We expected some records but couldn't find ANY.
+                // This is really suspicious, as if something went wrong.
+                throw new Error("Corrupted zip or bug: expected " + this.centralDirRecords + " records in central dir, got " + this.files.length);
+            } else {
+                // We found some records but not all.
+                // Something is wrong but we got something for the user: no error here.
+                // console.warn("expected", this.centralDirRecords, "records in central dir, got", this.files.length);
+            }
+        }
+    },
+    /**
+     * Read the end of central directory.
+     */
+    readEndOfCentral: function() {
+        var offset = this.reader.lastIndexOfSignature(sig.CENTRAL_DIRECTORY_END);
+        if (offset < 0) {
+            // Check if the content is a truncated zip or complete garbage.
+            // A "LOCAL_FILE_HEADER" is not required at the beginning (a
+            // self-extracting zip for example) but it can give a good hint.
+            // If an ajax request was used without responseType, we will also
+            // get unreadable data.
+            var isGarbage = !this.isSignature(0, sig.LOCAL_FILE_HEADER);
+
+            if (isGarbage) {
+                throw new Error("Can't find end of central directory : is this a zip file ? " +
+                                "If it is, see https://stuk.github.io/jszip/documentation/howto/read_zip.html");
+            } else {
+                throw new Error("Corrupted zip: can't find end of central directory");
+            }
+
+        }
+        this.reader.setIndex(offset);
+        var endOfCentralDirOffset = offset;
+        this.checkSignature(sig.CENTRAL_DIRECTORY_END);
+        this.readBlockEndOfCentral();
+
+
+        /* extract from the zip spec :
+            4)  If one of the fields in the end of central directory
+                record is too small to hold required data, the field
+                should be set to -1 (0xFFFF or 0xFFFFFFFF) and the
+                ZIP64 format record should be created.
+            5)  The end of central directory record and the
+                Zip64 end of central directory locator record must
+                reside on the same disk when splitting or spanning
+                an archive.
+         */
+        if (this.diskNumber === utils.MAX_VALUE_16BITS || this.diskWithCentralDirStart === utils.MAX_VALUE_16BITS || this.centralDirRecordsOnThisDisk === utils.MAX_VALUE_16BITS || this.centralDirRecords === utils.MAX_VALUE_16BITS || this.centralDirSize === utils.MAX_VALUE_32BITS || this.centralDirOffset === utils.MAX_VALUE_32BITS) {
+            this.zip64 = true;
+
+            /*
+            Warning : the zip64 extension is supported, but ONLY if the 64-bit integers read from
+            the zip file can fit into a 32-bit integer. This cannot be solved : JavaScript represents
+            all numbers as 64-bit double precision IEEE 754 floating point numbers.
+            So, we have 53 bits for integers and bitwise operations treat everything as 32 bits.
+            see https://developer.mozilla.org/en-US/docs/JavaScript/Reference/Operators/Bitwise_Operators
+            and http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-262.pdf section 8.5
+            */
+
+            // should look for a zip64 EOCD locator
+            offset = this.reader.lastIndexOfSignature(sig.ZIP64_CENTRAL_DIRECTORY_LOCATOR);
+            if (offset < 0) {
+                throw new Error("Corrupted zip: can't find the ZIP64 end of central directory locator");
+            }
+            this.reader.setIndex(offset);
+            this.checkSignature(sig.ZIP64_CENTRAL_DIRECTORY_LOCATOR);
+            this.readBlockZip64EndOfCentralLocator();
+
+            // now the zip64 EOCD record
+            if (!this.isSignature(this.relativeOffsetEndOfZip64CentralDir, sig.ZIP64_CENTRAL_DIRECTORY_END)) {
+                // console.warn("ZIP64 end of central directory not where expected.");
+                this.relativeOffsetEndOfZip64CentralDir = this.reader.lastIndexOfSignature(sig.ZIP64_CENTRAL_DIRECTORY_END);
+                if (this.relativeOffsetEndOfZip64CentralDir < 0) {
+                    throw new Error("Corrupted zip: can't find the ZIP64 end of central directory");
+                }
+            }
+            this.reader.setIndex(this.relativeOffsetEndOfZip64CentralDir);
+            this.checkSignature(sig.ZIP64_CENTRAL_DIRECTORY_END);
+            this.readBlockZip64EndOfCentral();
+        }
+
+        var expectedEndOfCentralDirOffset = this.centralDirOffset + this.centralDirSize;
+        if (this.zip64) {
+            expectedEndOfCentralDirOffset += 20; // end of central dir 64 locator
+            expectedEndOfCentralDirOffset += 12 /* should not include the leading 12 bytes */ + this.zip64EndOfCentralSize;
+        }
+
+        var extraBytes = endOfCentralDirOffset - expectedEndOfCentralDirOffset;
+
+        if (extraBytes > 0) {
+            // console.warn(extraBytes, "extra bytes at beginning or within zipfile");
+            if (this.isSignature(endOfCentralDirOffset, sig.CENTRAL_FILE_HEADER)) {
+                // The offsets seem wrong, but we have something at the specified offset.
+                // So… we keep it.
+            } else {
+                // the offset is wrong, update the "zero" of the reader
+                // this happens if data has been prepended (crx files for example)
+                this.reader.zero = extraBytes;
+            }
+        } else if (extraBytes < 0) {
+            throw new Error("Corrupted zip: missing " + Math.abs(extraBytes) + " bytes.");
+        }
+    },
+    prepareReader: function(data) {
+        this.reader = readerFor(data);
+    },
+    /**
+     * Read a zip file and create ZipEntries.
+     * @param {String|ArrayBuffer|Uint8Array|Buffer} data the binary data representing a zip file.
+     */
+    load: function(data) {
+        this.prepareReader(data);
+        this.readEndOfCentral();
+        this.readCentralDir();
+        this.readLocalFiles();
+    }
+};
+// }}} end of ZipEntries
+module.exports = ZipEntries;
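
ZipEntries is the internal parser behind JSZip#loadAsync (driven from lib/load.js). A rough sketch of how it is used, with the decodeFileName option spelled out; the require paths are illustrative and this is not the exact load.js code:

    var ZipEntries = require("./libs/jszip/lib/zipEntries");
    var utf8 = require("./libs/jszip/lib/utf8");

    function parseZip(binaryData) {
        // readBlockEndOfCentral() and ZipEntry#handleUTF8() expect a
        // decodeFileName callback; utf8decode is the usual default.
        var entries = new ZipEntries({ decodeFileName: utf8.utf8decode });
        entries.load(binaryData); // EOCD -> (optional ZIP64) -> central dir -> local headers
        return entries.files;     // array of ZipEntry objects
    }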

+ 293 - 0
libs/jszip/lib/zipEntry.js

@@ -0,0 +1,293 @@
+"use strict";
+var readerFor = require("./reader/readerFor");
+var utils = require("./utils");
+var CompressedObject = require("./compressedObject");
+var crc32fn = require("./crc32");
+var utf8 = require("./utf8");
+var compressions = require("./compressions");
+var support = require("./support");
+
+var MADE_BY_DOS = 0x00;
+var MADE_BY_UNIX = 0x03;
+
+/**
+ * Find a compression registered in JSZip.
+ * @param {string} compressionMethod the method magic to find.
+ * @return {Object|null} the JSZip compression object, null if none found.
+ */
+var findCompression = function(compressionMethod) {
+    for (var method in compressions) {
+        if (!Object.prototype.hasOwnProperty.call(compressions, method)) {
+            continue;
+        }
+        if (compressions[method].magic === compressionMethod) {
+            return compressions[method];
+        }
+    }
+    return null;
+};
+
+// class ZipEntry {{{
+/**
+ * An entry in the zip file.
+ * @constructor
+ * @param {Object} options Options of the current file.
+ * @param {Object} loadOptions Options for loading the stream.
+ */
+function ZipEntry(options, loadOptions) {
+    this.options = options;
+    this.loadOptions = loadOptions;
+}
+ZipEntry.prototype = {
+    /**
+     * Say whether the file is encrypted.
+     * @return {boolean} true if the file is encrypted, false otherwise.
+     */
+    isEncrypted: function() {
+        // bit 1 is set
+        return (this.bitFlag & 0x0001) === 0x0001;
+    },
+    /**
+     * Say whether the file has a utf-8 encoded filename/comment.
+     * @return {boolean} true if the filename/comment is in utf-8, false otherwise.
+     */
+    useUTF8: function() {
+        // bit 11 is set
+        return (this.bitFlag & 0x0800) === 0x0800;
+    },
+    /**
+     * Read the local part of a zip file and add the info in this object.
+     * @param {DataReader} reader the reader to use.
+     */
+    readLocalPart: function(reader) {
+        var compression, localExtraFieldsLength;
+
+        // we already know everything from the central dir !
+        // If the central dir data are wrong, we are doomed.
+        // On the bright side, the local part is scary : zip64, data descriptors, both, etc.
+        // The less data we get here, the more reliable this should be.
+        // Let's skip the whole header and dash to the data !
+        reader.skip(22);
+        // in some zips created on windows, the filename stored in the central dir contains \ instead of /.
+        // Strangely, the filename here is OK.
+        // I would love to treat these zip files as corrupted (see http://www.info-zip.org/FAQ.html#backslashes
+        // or APPNOTE#4.4.17.1, "All slashes MUST be forward slashes '/'") but there are a lot of bad zip generators...
+        // Search "unzip mismatching "local" filename continuing with "central" filename version" on
+        // the internet.
+        //
+        // I think I see the logic here : the central directory is used to display
+        // content and the local directory is used to extract the files. Mixing / and \
+        // may be used to display \ to windows users and use / when extracting the files.
+        // Unfortunately, this also leads to some issues : http://seclists.org/fulldisclosure/2009/Sep/394
+        this.fileNameLength = reader.readInt(2);
+        localExtraFieldsLength = reader.readInt(2); // can't be sure this will be the same as the central dir
+        // the fileName is stored as binary data, the handleUTF8 method will take care of the encoding.
+        this.fileName = reader.readData(this.fileNameLength);
+        reader.skip(localExtraFieldsLength);
+
+        if (this.compressedSize === -1 || this.uncompressedSize === -1) {
+            throw new Error("Bug or corrupted zip : didn't get enough information from the central directory " + "(compressedSize === -1 || uncompressedSize === -1)");
+        }
+
+        compression = findCompression(this.compressionMethod);
+        if (compression === null) { // no compression found
+            throw new Error("Corrupted zip : compression " + utils.pretty(this.compressionMethod) + " unknown (inner file : " + utils.transformTo("string", this.fileName) + ")");
+        }
+        this.decompressed = new CompressedObject(this.compressedSize, this.uncompressedSize, this.crc32, compression, reader.readData(this.compressedSize));
+    },
+
+    /**
+     * Read the central part of a zip file and add the info in this object.
+     * @param {DataReader} reader the reader to use.
+     */
+    readCentralPart: function(reader) {
+        this.versionMadeBy = reader.readInt(2);
+        reader.skip(2);
+        // this.versionNeeded = reader.readInt(2);
+        this.bitFlag = reader.readInt(2);
+        this.compressionMethod = reader.readString(2);
+        this.date = reader.readDate();
+        this.crc32 = reader.readInt(4);
+        this.compressedSize = reader.readInt(4);
+        this.uncompressedSize = reader.readInt(4);
+        var fileNameLength = reader.readInt(2);
+        this.extraFieldsLength = reader.readInt(2);
+        this.fileCommentLength = reader.readInt(2);
+        this.diskNumberStart = reader.readInt(2);
+        this.internalFileAttributes = reader.readInt(2);
+        this.externalFileAttributes = reader.readInt(4);
+        this.localHeaderOffset = reader.readInt(4);
+
+        if (this.isEncrypted()) {
+            throw new Error("Encrypted zip are not supported");
+        }
+
+        // will be read in the local part, see the comments there
+        reader.skip(fileNameLength);
+        this.readExtraFields(reader);
+        this.parseZIP64ExtraField(reader);
+        this.fileComment = reader.readData(this.fileCommentLength);
+    },
+
+    /**
+     * Parse the external file attributes and get the unix/dos permissions.
+     */
+    processAttributes: function () {
+        this.unixPermissions = null;
+        this.dosPermissions = null;
+        var madeBy = this.versionMadeBy >> 8;
+
+        // Check if we have the DOS directory flag set.
+        // We look for it in the DOS and UNIX permissions
+        // but some unknown platform could set it as a compatibility flag.
+        this.dir = this.externalFileAttributes & 0x0010 ? true : false;
+
+        if(madeBy === MADE_BY_DOS) {
+            // first 6 bits (0 to 5)
+            this.dosPermissions = this.externalFileAttributes & 0x3F;
+        }
+
+        if(madeBy === MADE_BY_UNIX) {
+            this.unixPermissions = (this.externalFileAttributes >> 16) & 0xFFFF;
+            // the octal permissions are in (this.unixPermissions & 0x01FF).toString(8);
+        }
+
+        // fail safe : if the name ends with a / it probably means a folder
+        if (!this.dir && this.fileNameStr.slice(-1) === "/") {
+            this.dir = true;
+        }
+    },
+
+    /**
+     * Parse the ZIP64 extra field (collected earlier by readExtraFields)
+     * and merge the info into the current ZipEntry.
+     */
+    parseZIP64ExtraField: function() {
+        if (!this.extraFields[0x0001]) {
+            return;
+        }
+
+        // should be something, preparing the extra reader
+        var extraReader = readerFor(this.extraFields[0x0001].value);
+
+        // I really hope that these 64-bit integers can fit in 32-bit integers, because js
+        // won't let us have more.
+        if (this.uncompressedSize === utils.MAX_VALUE_32BITS) {
+            this.uncompressedSize = extraReader.readInt(8);
+        }
+        if (this.compressedSize === utils.MAX_VALUE_32BITS) {
+            this.compressedSize = extraReader.readInt(8);
+        }
+        if (this.localHeaderOffset === utils.MAX_VALUE_32BITS) {
+            this.localHeaderOffset = extraReader.readInt(8);
+        }
+        if (this.diskNumberStart === utils.MAX_VALUE_32BITS) {
+            this.diskNumberStart = extraReader.readInt(4);
+        }
+    },
+    /**
+     * Read the extra fields of the central part and add the info to this object.
+     * @param {DataReader} reader the reader to use.
+     */
+    readExtraFields: function(reader) {
+        var end = reader.index + this.extraFieldsLength,
+            extraFieldId,
+            extraFieldLength,
+            extraFieldValue;
+
+        if (!this.extraFields) {
+            this.extraFields = {};
+        }
+
+        while (reader.index + 4 < end) {
+            extraFieldId = reader.readInt(2);
+            extraFieldLength = reader.readInt(2);
+            extraFieldValue = reader.readData(extraFieldLength);
+
+            this.extraFields[extraFieldId] = {
+                id: extraFieldId,
+                length: extraFieldLength,
+                value: extraFieldValue
+            };
+        }
+
+        reader.setIndex(end);
+    },
+    /**
+     * Apply a UTF-8 transformation if needed.
+     */
+    handleUTF8: function() {
+        var decodeParamType = support.uint8array ? "uint8array" : "array";
+        if (this.useUTF8()) {
+            this.fileNameStr = utf8.utf8decode(this.fileName);
+            this.fileCommentStr = utf8.utf8decode(this.fileComment);
+        } else {
+            var upath = this.findExtraFieldUnicodePath();
+            if (upath !== null) {
+                this.fileNameStr = upath;
+            } else {
+                // ASCII text or unsupported code page
+                var fileNameByteArray =  utils.transformTo(decodeParamType, this.fileName);
+                this.fileNameStr = this.loadOptions.decodeFileName(fileNameByteArray);
+            }
+
+            var ucomment = this.findExtraFieldUnicodeComment();
+            if (ucomment !== null) {
+                this.fileCommentStr = ucomment;
+            } else {
+                // ASCII text or unsupported code page
+                var commentByteArray =  utils.transformTo(decodeParamType, this.fileComment);
+                this.fileCommentStr = this.loadOptions.decodeFileName(commentByteArray);
+            }
+        }
+    },
+
+    /**
+     * Find the unicode path declared in the extra field, if any.
+     * @return {String} the unicode path, null otherwise.
+     */
+    findExtraFieldUnicodePath: function() {
+        var upathField = this.extraFields[0x7075];
+        if (upathField) {
+            var extraReader = readerFor(upathField.value);
+
+            // wrong version
+            if (extraReader.readInt(1) !== 1) {
+                return null;
+            }
+
+            // the crc of the filename changed, this field is out of date.
+            if (crc32fn(this.fileName) !== extraReader.readInt(4)) {
+                return null;
+            }
+
+            return utf8.utf8decode(extraReader.readData(upathField.length - 5));
+        }
+        return null;
+    },
+
+    /**
+     * Find the unicode comment declared in the extra field, if any.
+     * @return {String} the unicode comment, null otherwise.
+     */
+    findExtraFieldUnicodeComment: function() {
+        var ucommentField = this.extraFields[0x6375];
+        if (ucommentField) {
+            var extraReader = readerFor(ucommentField.value);
+
+            // wrong version
+            if (extraReader.readInt(1) !== 1) {
+                return null;
+            }
+
+            // the crc of the comment changed, this field is out of date.
+            if (crc32fn(this.fileComment) !== extraReader.readInt(4)) {
+                return null;
+            }
+
+            return utf8.utf8decode(extraReader.readData(ucommentField.length - 5));
+        }
+        return null;
+    }
+};
+module.exports = ZipEntry;
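
A small worked example of the attribute handling in processAttributes() above, using assumed header values (a regular file with mode 0644 written by a UNIX tool):

    // hypothetical values, for illustration only
    var versionMadeBy = 0x031E;              // high byte 0x03 = MADE_BY_UNIX
    var externalFileAttributes = 0x81A40000; // UNIX mode 0100644 in the high 16 bits

    var madeBy = versionMadeBy >> 8;                                // 3 -> UNIX
    var dir = (externalFileAttributes & 0x0010) ? true : false;     // false: no DOS dir flag
    var unixPermissions = (externalFileAttributes >> 16) & 0xFFFF;  // 0x81A4

    console.log(madeBy, dir, (unixPermissions & 0x01FF).toString(8)); // 3 false "644"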

+ 133 - 0
libs/jszip/lib/zipObject.js

@@ -0,0 +1,133 @@
+"use strict";
+
+var StreamHelper = require("./stream/StreamHelper");
+var DataWorker = require("./stream/DataWorker");
+var utf8 = require("./utf8");
+var CompressedObject = require("./compressedObject");
+var GenericWorker = require("./stream/GenericWorker");
+
+/**
+ * A simple object representing a file in the zip file.
+ * @constructor
+ * @param {string} name the name of the file
+ * @param {String|ArrayBuffer|Uint8Array|Buffer} data the data
+ * @param {Object} options the options of the file
+ */
+var ZipObject = function(name, data, options) {
+    this.name = name;
+    this.dir = options.dir;
+    this.date = options.date;
+    this.comment = options.comment;
+    this.unixPermissions = options.unixPermissions;
+    this.dosPermissions = options.dosPermissions;
+
+    this._data = data;
+    this._dataBinary = options.binary;
+    // keep only the compression
+    this.options = {
+        compression : options.compression,
+        compressionOptions : options.compressionOptions
+    };
+};
+
+ZipObject.prototype = {
+    /**
+     * Create an internal stream for the content of this object.
+     * @param {String} type the type of each chunk.
+     * @return StreamHelper the stream.
+     */
+    internalStream: function (type) {
+        var result = null, outputType = "string";
+        try {
+            if (!type) {
+                throw new Error("No output type specified.");
+            }
+            outputType = type.toLowerCase();
+            var askUnicodeString = outputType === "string" || outputType === "text";
+            if (outputType === "binarystring" || outputType === "text") {
+                outputType = "string";
+            }
+            result = this._decompressWorker();
+
+            var isUnicodeString = !this._dataBinary;
+
+            if (isUnicodeString && !askUnicodeString) {
+                result = result.pipe(new utf8.Utf8EncodeWorker());
+            }
+            if (!isUnicodeString && askUnicodeString) {
+                result = result.pipe(new utf8.Utf8DecodeWorker());
+            }
+        } catch (e) {
+            result = new GenericWorker("error");
+            result.error(e);
+        }
+
+        return new StreamHelper(result, outputType, "");
+    },
+
+    /**
+     * Prepare the content in the requested type.
+     * @param {String} type the type of the result.
+     * @param {Function} onUpdate a function to call on each internal update.
+     * @return Promise the promise of the result.
+     */
+    async: function (type, onUpdate) {
+        return this.internalStream(type).accumulate(onUpdate);
+    },
+
+    /**
+     * Prepare the content as a nodejs stream.
+     * @param {String} type the type of each chunk.
+     * @param {Function} onUpdate a function to call on each internal update.
+     * @return Stream the stream.
+     */
+    nodeStream: function (type, onUpdate) {
+        return this.internalStream(type || "nodebuffer").toNodejsStream(onUpdate);
+    },
+
+    /**
+     * Return a worker for the compressed content.
+     * @private
+     * @param {Object} compression the compression object to use.
+     * @param {Object} compressionOptions the options to use when compressing.
+     * @return Worker the worker.
+     */
+    _compressWorker: function (compression, compressionOptions) {
+        if (
+            this._data instanceof CompressedObject &&
+            this._data.compression.magic === compression.magic
+        ) {
+            return this._data.getCompressedWorker();
+        } else {
+            var result = this._decompressWorker();
+            if(!this._dataBinary) {
+                result = result.pipe(new utf8.Utf8EncodeWorker());
+            }
+            return CompressedObject.createWorkerFrom(result, compression, compressionOptions);
+        }
+    },
+    /**
+     * Return a worker for the decompressed content.
+     * @private
+     * @return Worker the worker.
+     */
+    _decompressWorker : function () {
+        if (this._data instanceof CompressedObject) {
+            return this._data.getContentWorker();
+        } else if (this._data instanceof GenericWorker) {
+            return this._data;
+        } else {
+            return new DataWorker(this._data);
+        }
+    }
+};
+
+var removedMethods = ["asText", "asBinary", "asNodeBuffer", "asUint8Array", "asArrayBuffer"];
+var removedFn = function () {
+    throw new Error("This method has been removed in JSZip 3.0, please check the upgrade guide.");
+};
+
+for(var i = 0; i < removedMethods.length; i++) {
+    ZipObject.prototype[removedMethods[i]] = removedFn;
+}
+module.exports = ZipObject;
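
ZipObject is what JSZip#file(name) returns, so the usual way to exercise the methods above is through the public 3.x API (a minimal sketch, assuming the jszip package or this vendored build is on the require path):

    var JSZip = require("jszip");

    var zip = new JSZip();
    zip.file("hello.txt", "Hello world\n");

    // async() drives internalStream().accumulate() as defined above
    zip.file("hello.txt").async("string").then(function (text) {
        console.log(text); // "Hello world\n"
    });

    // the pre-3.0 synchronous accessors now throw, as enforced above:
    // zip.file("hello.txt").asText(); // Error: This method has been removed in JSZip 3.0...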

+ 67 - 0
libs/jszip/package.json

@@ -0,0 +1,67 @@
+{
+  "name": "jszip",
+  "version": "3.10.1",
+  "author": "Stuart Knightley <stuart@stuartk.com>",
+  "description": "Create, read and edit .zip files with JavaScript http://stuartk.com/jszip",
+  "scripts": {
+    "test": "npm run test-node && npm run test-browser && tsc",
+    "test-node": "qunit --require ./test/helpers/test-utils.js --require ./test/helpers/node-test-utils.js test/asserts/",
+    "test-browser": "grunt build && node test/run.js --test",
+    "benchmark": "npm run benchmark-node && npm run benchmark-browser",
+    "benchmark-node": "node test/benchmark/node.js",
+    "benchmark-browser": "node test/run.js --benchmark",
+    "lint": "eslint ."
+  },
+  "contributors": [
+    {
+      "name": "Franz Buchinger"
+    },
+    {
+      "name": "António Afonso"
+    },
+    {
+      "name": "David Duponchel"
+    },
+    {
+      "name": "yiminghe"
+    }
+  ],
+  "main": "./lib/index",
+  "browser": {
+    "./lib/index": "./dist/jszip.min.js",
+    "readable-stream": "./lib/readable-stream-browser.js"
+  },
+  "types": "./index.d.ts",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/Stuk/jszip.git"
+  },
+  "keywords": [
+    "zip",
+    "deflate",
+    "inflate"
+  ],
+  "devDependencies": {
+    "benchmark": "^2.1.4",
+    "browserify": "~13.0.0",
+    "eslint": "^8.18.0",
+    "grunt": "~0.4.1",
+    "grunt-browserify": "~5.0.0",
+    "grunt-cli": "~1.1.0",
+    "grunt-contrib-uglify": "~4.0.1",
+    "http-server": "^13.0.2",
+    "jszip-utils": "~0.0.2",
+    "package-json-versionify": "1.0.2",
+    "playwright": "^1.15.2",
+    "qunit": "~2.9.2",
+    "tmp": "0.0.28",
+    "typescript": "^4.6.3"
+  },
+  "dependencies": {
+    "lie": "~3.3.0",
+    "pako": "~1.0.2",
+    "readable-stream": "~2.3.6",
+    "setimmediate": "^1.0.5"
+  },
+  "license": "(MIT OR GPL-3.0-or-later)"
+}

+ 21 - 0
libs/jszip/sponsors.md

@@ -0,0 +1,21 @@
+---
+title: "Sponsors"
+layout: default
+section: main
+---
+
+[JSZip](https://github.com/Stuk/jszip) was created in 2009 by [Stuart](https://github.com/Stuk). Since then it has received well over [600 million downloads](https://npm-stat.com/charts.html?package=jszip&from=2009-06-20&to=2022-06-20), is depended on by over 3000 packages on npm, and powers zipping and unzipping on sites large and small.
+
+This project only exists because of all the work dedicated to it by me and the other contributors.
+
+If you or your company has benefited from JSZip then please consider [sponsoring on Github](https://github.com/sponsors/Stuk).
+
+## 💎 Diamond
+
+## 🥇 Gold
+
+## 🥈 Silver
+
+## 🥉 Bronze
+
+## Supporters

+ 101 - 0
libs/jszip/tsconfig.json

@@ -0,0 +1,101 @@
+{
+  "compilerOptions": {
+    /* Visit https://aka.ms/tsconfig.json to read more about this file */
+
+    /* Projects */
+    // "incremental": true,                              /* Enable incremental compilation */
+    // "composite": true,                                /* Enable constraints that allow a TypeScript project to be used with project references. */
+    // "tsBuildInfoFile": "./",                          /* Specify the folder for .tsbuildinfo incremental compilation files. */
+    // "disableSourceOfProjectReferenceRedirect": true,  /* Disable preferring source files instead of declaration files when referencing composite projects */
+    // "disableSolutionSearching": true,                 /* Opt a project out of multi-project reference checking when editing. */
+    // "disableReferencedProjectLoad": true,             /* Reduce the number of projects loaded automatically by TypeScript. */
+
+    /* Language and Environment */
+    "target": "es2016",                                  /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */
+    // "lib": [],                                        /* Specify a set of bundled library declaration files that describe the target runtime environment. */
+    // "jsx": "preserve",                                /* Specify what JSX code is generated. */
+    // "experimentalDecorators": true,                   /* Enable experimental support for TC39 stage 2 draft decorators. */
+    // "emitDecoratorMetadata": true,                    /* Emit design-type metadata for decorated declarations in source files. */
+    // "jsxFactory": "",                                 /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h' */
+    // "jsxFragmentFactory": "",                         /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
+    // "jsxImportSource": "",                            /* Specify module specifier used to import the JSX factory functions when using `jsx: react-jsx*`.` */
+    // "reactNamespace": "",                             /* Specify the object invoked for `createElement`. This only applies when targeting `react` JSX emit. */
+    // "noLib": true,                                    /* Disable including any library files, including the default lib.d.ts. */
+    // "useDefineForClassFields": true,                  /* Emit ECMAScript-standard-compliant class fields. */
+
+    /* Modules */
+    "module": "commonjs",                                /* Specify what module code is generated. */
+    // "rootDir": "./",                                  /* Specify the root folder within your source files. */
+    // "moduleResolution": "node",                       /* Specify how TypeScript looks up a file from a given module specifier. */
+    // "baseUrl": "./",                                  /* Specify the base directory to resolve non-relative module names. */
+    // "paths": {},                                      /* Specify a set of entries that re-map imports to additional lookup locations. */
+    // "rootDirs": [],                                   /* Allow multiple folders to be treated as one when resolving modules. */
+    // "typeRoots": [],                                  /* Specify multiple folders that act like `./node_modules/@types`. */
+    // "types": [],                                      /* Specify type package names to be included without being referenced in a source file. */
+    // "allowUmdGlobalAccess": true,                     /* Allow accessing UMD globals from modules. */
+    // "resolveJsonModule": true,                        /* Enable importing .json files */
+    // "noResolve": true,                                /* Disallow `import`s, `require`s or `<reference>`s from expanding the number of files TypeScript should add to a project. */
+
+    /* JavaScript Support */
+    // "allowJs": true,                                  /* Allow JavaScript files to be a part of your program. Use the `checkJS` option to get errors from these files. */
+    // "checkJs": true,                                  /* Enable error reporting in type-checked JavaScript files. */
+    // "maxNodeModuleJsDepth": 1,                        /* Specify the maximum folder depth used for checking JavaScript files from `node_modules`. Only applicable with `allowJs`. */
+
+    /* Emit */
+    // "declaration": true,                              /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
+    // "declarationMap": true,                           /* Create sourcemaps for d.ts files. */
+    // "emitDeclarationOnly": true,                      /* Only output d.ts files and not JavaScript files. */
+    // "sourceMap": true,                                /* Create source map files for emitted JavaScript files. */
+    // "outFile": "./",                                  /* Specify a file that bundles all outputs into one JavaScript file. If `declaration` is true, also designates a file that bundles all .d.ts output. */
+    // "outDir": "./",                                   /* Specify an output folder for all emitted files. */
+    // "removeComments": true,                           /* Disable emitting comments. */
+    "noEmit": true,                                   /* Disable emitting files from a compilation. */
+    // "importHelpers": true,                            /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
+    // "importsNotUsedAsValues": "remove",               /* Specify emit/checking behavior for imports that are only used for types */
+    // "downlevelIteration": true,                       /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
+    // "sourceRoot": "",                                 /* Specify the root path for debuggers to find the reference source code. */
+    // "mapRoot": "",                                    /* Specify the location where debugger should locate map files instead of generated locations. */
+    // "inlineSourceMap": true,                          /* Include sourcemap files inside the emitted JavaScript. */
+    // "inlineSources": true,                            /* Include source code in the sourcemaps inside the emitted JavaScript. */
+    // "emitBOM": true,                                  /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
+    // "newLine": "crlf",                                /* Set the newline character for emitting files. */
+    // "stripInternal": true,                            /* Disable emitting declarations that have `@internal` in their JSDoc comments. */
+    // "noEmitHelpers": true,                            /* Disable generating custom helper functions like `__extends` in compiled output. */
+    // "noEmitOnError": true,                            /* Disable emitting files if any type checking errors are reported. */
+    // "preserveConstEnums": true,                       /* Disable erasing `const enum` declarations in generated code. */
+    // "declarationDir": "./",                           /* Specify the output directory for generated declaration files. */
+    // "preserveValueImports": true,                     /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */
+
+    /* Interop Constraints */
+    // "isolatedModules": true,                          /* Ensure that each file can be safely transpiled without relying on other imports. */
+    // "allowSyntheticDefaultImports": true,             /* Allow 'import x from y' when a module doesn't have a default export. */
+    "esModuleInterop": true,                             /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables `allowSyntheticDefaultImports` for type compatibility. */
+    // "preserveSymlinks": true,                         /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
+    "forceConsistentCasingInFileNames": true,            /* Ensure that casing is correct in imports. */
+
+    /* Type Checking */
+    "strict": true,                                      /* Enable all strict type-checking options. */
+    // "noImplicitAny": true,                            /* Enable error reporting for expressions and declarations with an implied `any` type.. */
+    // "strictNullChecks": true,                         /* When type checking, take into account `null` and `undefined`. */
+    // "strictFunctionTypes": true,                      /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
+    // "strictBindCallApply": true,                      /* Check that the arguments for `bind`, `call`, and `apply` methods match the original function. */
+    // "strictPropertyInitialization": true,             /* Check for class properties that are declared but not set in the constructor. */
+    // "noImplicitThis": true,                           /* Enable error reporting when `this` is given the type `any`. */
+    // "useUnknownInCatchVariables": true,               /* Type catch clause variables as 'unknown' instead of 'any'. */
+    // "alwaysStrict": true,                             /* Ensure 'use strict' is always emitted. */
+    // "noUnusedLocals": true,                           /* Enable error reporting when a local variables aren't read. */
+    // "noUnusedParameters": true,                       /* Raise an error when a function parameter isn't read */
+    // "exactOptionalPropertyTypes": true,               /* Interpret optional property types as written, rather than adding 'undefined'. */
+    // "noImplicitReturns": true,                        /* Enable error reporting for codepaths that do not explicitly return in a function. */
+    // "noFallthroughCasesInSwitch": true,               /* Enable error reporting for fallthrough cases in switch statements. */
+    // "noUncheckedIndexedAccess": true,                 /* Include 'undefined' in index signature results */
+    // "noImplicitOverride": true,                       /* Ensure overriding members in derived classes are marked with an override modifier. */
+    // "noPropertyAccessFromIndexSignature": true,       /* Enforces using indexed accessors for keys declared using an indexed type */
+    // "allowUnusedLabels": true,                        /* Disable error reporting for unused labels. */
+    // "allowUnreachableCode": true,                     /* Disable error reporting for unreachable code. */
+
+    /* Completeness */
+    // "skipDefaultLibCheck": true,                      /* Skip type checking .d.ts files that are included with TypeScript. */
+    // "skipLibCheck": true                                 /* Skip type checking all .d.ts files. */
+  }
+}

+ 247 - 0
libs/jszip/vendor/FileSaver.js

@@ -0,0 +1,247 @@
+/*! FileSaver.js
+ *  A saveAs() FileSaver implementation.
+ *  2014-01-24
+ *
+ *  By Eli Grey, http://eligrey.com
+ *  License: X11/MIT
+ *    See LICENSE.md
+ */
+
+/*global self */
+/*jslint bitwise: true, indent: 4, laxbreak: true, laxcomma: true, smarttabs: true, plusplus: true */
+
+/*! @source http://purl.eligrey.com/github/FileSaver.js/blob/main/FileSaver.js */
+
+var saveAs = saveAs
+  // IE 10+ (native saveAs)
+  || (typeof navigator !== "undefined" &&
+      navigator.msSaveOrOpenBlob && navigator.msSaveOrOpenBlob.bind(navigator))
+  // Everyone else
+  || (function(view) {
+	"use strict";
+	// IE <10 is explicitly unsupported
+	if (typeof navigator !== "undefined" &&
+	    /MSIE [1-9]\./.test(navigator.userAgent)) {
+		return;
+	}
+	var
+		  doc = view.document
+		  // only get URL when necessary in case BlobBuilder.js hasn't overridden it yet
+		, get_URL = function() {
+			return view.URL || view.webkitURL || view;
+		}
+		, URL = view.URL || view.webkitURL || view
+		, save_link = doc.createElementNS("http://www.w3.org/1999/xhtml", "a")
+		, can_use_save_link = !view.externalHost && "download" in save_link
+		, click = function(node) {
+			var event = doc.createEvent("MouseEvents");
+			event.initMouseEvent(
+				"click", true, false, view, 0, 0, 0, 0, 0
+				, false, false, false, false, 0, null
+			);
+			node.dispatchEvent(event);
+		}
+		, webkit_req_fs = view.webkitRequestFileSystem
+		, req_fs = view.requestFileSystem || webkit_req_fs || view.mozRequestFileSystem
+		, throw_outside = function(ex) {
+			(view.setImmediate || view.setTimeout)(function() {
+				throw ex;
+			}, 0);
+		}
+		, force_saveable_type = "application/octet-stream"
+		, fs_min_size = 0
+		, deletion_queue = []
+		, process_deletion_queue = function() {
+			var i = deletion_queue.length;
+			while (i--) {
+				var file = deletion_queue[i];
+				if (typeof file === "string") { // file is an object URL
+					URL.revokeObjectURL(file);
+				} else { // file is a File
+					file.remove();
+				}
+			}
+			deletion_queue.length = 0; // clear queue
+		}
+		, dispatch = function(filesaver, event_types, event) {
+			event_types = [].concat(event_types);
+			var i = event_types.length;
+			while (i--) {
+				var listener = filesaver["on" + event_types[i]];
+				if (typeof listener === "function") {
+					try {
+						listener.call(filesaver, event || filesaver);
+					} catch (ex) {
+						throw_outside(ex);
+					}
+				}
+			}
+		}
+		, FileSaver = function(blob, name) {
+			// First try a.download, then web filesystem, then object URLs
+			var
+				  filesaver = this
+				, type = blob.type
+				, blob_changed = false
+				, object_url
+				, target_view
+				, get_object_url = function() {
+					var object_url = get_URL().createObjectURL(blob);
+					deletion_queue.push(object_url);
+					return object_url;
+				}
+				, dispatch_all = function() {
+					dispatch(filesaver, "writestart progress write writeend".split(" "));
+				}
+				// on any filesys errors revert to saving with object URLs
+				, fs_error = function() {
+					// don't create more object URLs than needed
+					if (blob_changed || !object_url) {
+						object_url = get_object_url(blob);
+					}
+					if (target_view) {
+						target_view.location.href = object_url;
+					} else {
+						window.open(object_url, "_blank");
+					}
+					filesaver.readyState = filesaver.DONE;
+					dispatch_all();
+				}
+				, abortable = function(func) {
+					return function() {
+						if (filesaver.readyState !== filesaver.DONE) {
+							return func.apply(this, arguments);
+						}
+					};
+				}
+				, create_if_not_found = {create: true, exclusive: false}
+				, slice
+			;
+			filesaver.readyState = filesaver.INIT;
+			if (!name) {
+				name = "download";
+			}
+			if (can_use_save_link) {
+				object_url = get_object_url(blob);
+				// FF for Android has a nasty garbage collection mechanism
+				// that turns all objects that are not pure javascript into 'deadObject'
+				// this means `doc` and `save_link` are unusable and need to be recreated
+				// `view` is usable though:
+				doc = view.document;
+				save_link = doc.createElementNS("http://www.w3.org/1999/xhtml", "a");
+				save_link.href = object_url;
+				save_link.download = name;
+				var event = doc.createEvent("MouseEvents");
+				event.initMouseEvent(
+					"click", true, false, view, 0, 0, 0, 0, 0
+					, false, false, false, false, 0, null
+				);
+				save_link.dispatchEvent(event);
+				filesaver.readyState = filesaver.DONE;
+				dispatch_all();
+				return;
+			}
+			// Object and web filesystem URLs have a problem saving in Google Chrome when
+			// viewed in a tab, so I force save with application/octet-stream
+			// http://code.google.com/p/chromium/issues/detail?id=91158
+			if (view.chrome && type && type !== force_saveable_type) {
+				slice = blob.slice || blob.webkitSlice;
+				blob = slice.call(blob, 0, blob.size, force_saveable_type);
+				blob_changed = true;
+			}
+			// Since I can't be sure that the guessed media type will trigger a download
+			// in WebKit, I append .download to the filename.
+			// https://bugs.webkit.org/show_bug.cgi?id=65440
+			if (webkit_req_fs && name !== "download") {
+				name += ".download";
+			}
+			if (type === force_saveable_type || webkit_req_fs) {
+				target_view = view;
+			}
+			if (!req_fs) {
+				fs_error();
+				return;
+			}
+			fs_min_size += blob.size;
+			req_fs(view.TEMPORARY, fs_min_size, abortable(function(fs) {
+				fs.root.getDirectory("saved", create_if_not_found, abortable(function(dir) {
+					var save = function() {
+						dir.getFile(name, create_if_not_found, abortable(function(file) {
+							file.createWriter(abortable(function(writer) {
+								writer.onwriteend = function(event) {
+									target_view.location.href = file.toURL();
+									deletion_queue.push(file);
+									filesaver.readyState = filesaver.DONE;
+									dispatch(filesaver, "writeend", event);
+								};
+								writer.onerror = function() {
+									var error = writer.error;
+									if (error.code !== error.ABORT_ERR) {
+										fs_error();
+									}
+								};
+								"writestart progress write abort".split(" ").forEach(function(event) {
+									writer["on" + event] = filesaver["on" + event];
+								});
+								writer.write(blob);
+								filesaver.abort = function() {
+									writer.abort();
+									filesaver.readyState = filesaver.DONE;
+								};
+								filesaver.readyState = filesaver.WRITING;
+							}), fs_error);
+						}), fs_error);
+					};
+					dir.getFile(name, {create: false}, abortable(function(file) {
+						// delete file if it already exists
+						file.remove();
+						save();
+					}), abortable(function(ex) {
+						if (ex.code === ex.NOT_FOUND_ERR) {
+							save();
+						} else {
+							fs_error();
+						}
+					}));
+				}), fs_error);
+			}), fs_error);
+		}
+		, FS_proto = FileSaver.prototype
+		, saveAs = function(blob, name) {
+			return new FileSaver(blob, name);
+		}
+	;
+	FS_proto.abort = function() {
+		var filesaver = this;
+		filesaver.readyState = filesaver.DONE;
+		dispatch(filesaver, "abort");
+	};
+	FS_proto.readyState = FS_proto.INIT = 0;
+	FS_proto.WRITING = 1;
+	FS_proto.DONE = 2;
+
+	FS_proto.error =
+	FS_proto.onwritestart =
+	FS_proto.onprogress =
+	FS_proto.onwrite =
+	FS_proto.onabort =
+	FS_proto.onerror =
+	FS_proto.onwriteend =
+		null;
+
+	view.addEventListener("unload", process_deletion_queue, false);
+	saveAs.unload = function() {
+		process_deletion_queue();
+		view.removeEventListener("unload", process_deletion_queue, false);
+	};
+	return saveAs;
+}(
+	   typeof self !== "undefined" && self
+	|| typeof window !== "undefined" && window
+	|| this.content
+));
+// `self` is undefined in Firefox for Android content script context
+// while `this` is nsIContentFrameMessageManager
+// with an attribute `content` that corresponds to the window
+
+if (typeof module !== "undefined") module.exports = saveAs;

+ 13 - 0
libs/npyjs/CHANGELOG.md

@@ -0,0 +1,13 @@
+# Changelog
+
+## 0.7.0
+
+-   Add support for f16 dtypes, with f32 conversion as needed (#49, thanks @eek!)
+
+## 0.6.0
+
+-   Add type declarations
+
+## 0.5.0
+
+-   #36: Support passing an `ArrayBuffer` directly (thanks @AdityaSarwade!)

+ 174 - 0
libs/npyjs/LICENSE

@@ -0,0 +1,174 @@
+Apache License
+Version 2.0, January 2004
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction,
+and distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by
+the copyright owner that is granting the License.
+
+"Legal Entity" shall mean the union of the acting entity and all
+other entities that control, are controlled by, or are under common
+control with that entity. For the purposes of this definition,
+"control" means (i) the power, direct or indirect, to cause the
+direction or management of such entity, whether by contract or
+otherwise, or (ii) ownership of fifty percent (50%) or more of the
+outstanding shares, or (iii) beneficial ownership of such entity.
+
+"You" (or "Your") shall mean an individual or Legal Entity
+exercising permissions granted by this License.
+
+"Source" form shall mean the preferred form for making modifications,
+including but not limited to software source code, documentation
+source, and configuration files.
+
+"Object" form shall mean any form resulting from mechanical
+transformation or translation of a Source form, including but
+not limited to compiled object code, generated documentation,
+and conversions to other media types.
+
+"Work" shall mean the work of authorship, whether in Source or
+Object form, made available under the License, as indicated by a
+copyright notice that is included in or attached to the work
+(an example is provided in the Appendix below).
+
+"Derivative Works" shall mean any work, whether in Source or Object
+form, that is based on (or derived from) the Work and for which the
+editorial revisions, annotations, elaborations, or other modifications
+represent, as a whole, an original work of authorship. For the purposes
+of this License, Derivative Works shall not include works that remain
+separable from, or merely link (or bind by name) to the interfaces of,
+the Work and Derivative Works thereof.
+
+"Contribution" shall mean any work of authorship, including
+the original version of the Work and any modifications or additions
+to that Work or Derivative Works thereof, that is intentionally
+submitted to Licensor for inclusion in the Work by the copyright owner
+or by an individual or Legal Entity authorized to submit on behalf of
+the copyright owner. For the purposes of this definition, "submitted"
+means any form of electronic, verbal, or written communication sent
+to the Licensor or its representatives, including but not limited to
+communication on electronic mailing lists, source code control systems,
+and issue tracking systems that are managed by, or on behalf of, the
+Licensor for the purpose of discussing and improving the Work, but
+excluding communication that is conspicuously marked or otherwise
+designated in writing by the copyright owner as "Not a Contribution."
+
+"Contributor" shall mean Licensor and any individual or Legal Entity
+on behalf of whom a Contribution has been received by Licensor and
+subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+this License, each Contributor hereby grants to You a perpetual,
+worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+copyright license to reproduce, prepare Derivative Works of,
+publicly display, publicly perform, sublicense, and distribute the
+Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+this License, each Contributor hereby grants to You a perpetual,
+worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+(except as stated in this section) patent license to make, have made,
+use, offer to sell, sell, import, and otherwise transfer the Work,
+where such license applies only to those patent claims licensable
+by such Contributor that are necessarily infringed by their
+Contribution(s) alone or by combination of their Contribution(s)
+with the Work to which such Contribution(s) was submitted. If You
+institute patent litigation against any entity (including a
+cross-claim or counterclaim in a lawsuit) alleging that the Work
+or a Contribution incorporated within the Work constitutes direct
+or contributory patent infringement, then any patent licenses
+granted to You under this License for that Work shall terminate
+as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+Work or Derivative Works thereof in any medium, with or without
+modifications, and in Source or Object form, provided that You
+meet the following conditions:
+
+(a) You must give any other recipients of the Work or
+Derivative Works a copy of this License; and
+
+(b) You must cause any modified files to carry prominent notices
+stating that You changed the files; and
+
+(c) You must retain, in the Source form of any Derivative Works
+that You distribute, all copyright, patent, trademark, and
+attribution notices from the Source form of the Work,
+excluding those notices that do not pertain to any part of
+the Derivative Works; and
+
+(d) If the Work includes a "NOTICE" text file as part of its
+distribution, then any Derivative Works that You distribute must
+include a readable copy of the attribution notices contained
+within such NOTICE file, excluding those notices that do not
+pertain to any part of the Derivative Works, in at least one
+of the following places: within a NOTICE text file distributed
+as part of the Derivative Works; within the Source form or
+documentation, if provided along with the Derivative Works; or,
+within a display generated by the Derivative Works, if and
+wherever such third-party notices normally appear. The contents
+of the NOTICE file are for informational purposes only and
+do not modify the License. You may add Your own attribution
+notices within Derivative Works that You distribute, alongside
+or as an addendum to the NOTICE text from the Work, provided
+that such additional attribution notices cannot be construed
+as modifying the License.
+
+You may add Your own copyright statement to Your modifications and
+may provide additional or different license terms and conditions
+for use, reproduction, or distribution of Your modifications, or
+for any such Derivative Works as a whole, provided Your use,
+reproduction, and distribution of the Work otherwise complies with
+the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+any Contribution intentionally submitted for inclusion in the Work
+by You to the Licensor shall be under the terms and conditions of
+this License, without any additional terms or conditions.
+Notwithstanding the above, nothing herein shall supersede or modify
+the terms of any separate license agreement you may have executed
+with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+names, trademarks, service marks, or product names of the Licensor,
+except as required for reasonable and customary use in describing the
+origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+agreed to in writing, Licensor provides the Work (and each
+Contributor provides its Contributions) on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+implied, including, without limitation, any warranties or conditions
+of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+PARTICULAR PURPOSE. You are solely responsible for determining the
+appropriateness of using or redistributing the Work and assume any
+risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+whether in tort (including negligence), contract, or otherwise,
+unless required by applicable law (such as deliberate and grossly
+negligent acts) or agreed to in writing, shall any Contributor be
+liable to You for damages, including any direct, indirect, special,
+incidental, or consequential damages of any character arising as a
+result of this License or out of the use or inability to use the
+Work (including but not limited to damages for loss of goodwill,
+work stoppage, computer failure or malfunction, or any and all
+other commercial damages or losses), even if such Contributor
+has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+the Work or Derivative Works thereof, You may choose to offer,
+and charge a fee for, acceptance of support, warranty, indemnity,
+or other liability obligations and/or rights consistent with this
+License. However, in accepting such obligations, You may act only
+on Your own behalf and on Your sole responsibility, not on behalf
+of any other Contributor, and only if You agree to indemnify,
+defend, and hold each Contributor harmless for any liability
+incurred by, or claims asserted against, such Contributor by reason
+of your accepting any such warranty or additional liability.

+ 99 - 0
libs/npyjs/README.md

@@ -0,0 +1,99 @@
+<h1 align=center>npy.js</h1>
+<h6 align=center>Read .npy files directly in JS</h6>
+
+<p align=center>
+    <a href="https://www.npmjs.com/package/npyjs"><img src="https://img.shields.io/npm/v/npyjs.svg?style=for-the-badge" /></a>
+    <a href="https://github.com/aplbrain/npyjs"><img src="https://img.shields.io/github/issues/aplbrain/npyjs.svg?style=for-the-badge" /></a>
+    <a href="https://github.com/aplbrain/npyjs"><img src="https://img.shields.io/github/license/aplbrain/npyjs.svg?style=for-the-badge" /></a>
+    <img alt="GitHub Workflow Status" src="https://img.shields.io/github/actions/workflow/status/aplbrain/npyjs/test-node.yml?label=Tests&style=for-the-badge">
+</p>
+
+Read .npy files from [numpy](https://numpy.org/doc/1.18/reference/generated/numpy.save.html) in Node/JS.
+
+## Installation
+
+Include npy.js in your project directly, or:
+
+```shell
+yarn add npyjs
+# npm i npyjs
+```
+
+## Import
+
+```javascript
+import npyjs from "npyjs";
+```
+
+## Usage
+
+-   Create a new npyjs object:
+
+```javascript
+let n = new npyjs();
+// Or with options:
+let n = new npyjs({ convertFloat16: false }); // Disable float16 to float32 conversion
+```
+
+-   This object can now be used to load .npy files. The parsed result can be returned via a JavaScript callback, so usage looks like this:
+
+```javascript
+n.load("my-array.npy", (array, shape) => {
+    // `array` is a one-dimensional array of the raw data
+    // `shape` is a one-dimensional array that holds a numpy-style shape.
+    console.log(`You loaded an array with ${array.length} elements and ${shape.length} dimensions.`);
+});
+```
+
+-   You can also use this library promise-style, using either `.then` or `async`/`await`:
+
+```javascript
+n.load("test.npy").then((res) => {
+    // res has { data, shape, dtype } members.
+});
+```
+
+```javascript
+const npyArray = await n.load("test.npy");
+```
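+
+-   `load` also takes an optional third argument that is forwarded to `fetch`, which is handy when the file sits behind authentication or needs other request options. A small sketch (the URL and header value below are placeholders):
+
+```javascript
+n.load(
+    "https://example.com/private/my-array.npy",
+    null, // no callback; use the returned promise instead
+    { headers: { Authorization: "Bearer <token>" } }
+).then(({ data, shape }) => {
+    console.log(data.length, shape);
+});
+```
+
+-   `load` also accepts an `ArrayBuffer` directly (added in 0.5.0), which is useful when the bytes come from somewhere other than a URL, such as a file read in Node. A minimal sketch; `weights.npy` is only an example path, and a Node `Buffer` must first be sliced down to a standalone `ArrayBuffer`:
+
+```javascript
+import { readFile } from "fs/promises";
+import npyjs from "npyjs";
+
+const n = new npyjs();
+
+// readFile returns a Buffer that may be a view into a shared pool, so copy
+// out exactly this file's bytes as a standalone ArrayBuffer before parsing.
+const buf = await readFile("weights.npy");
+const arrayBuffer = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
+
+const { data, shape, dtype } = await n.load(arrayBuffer);
+console.log(dtype, shape, data.length);
+```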
+
+## Accessing multidimensional array elements
+
+-   You can conveniently access multidimensional array elements using the `ndarray` library, passing the `data` and `shape` returned by `load`:
+
+```javascript
+import ndarray from "ndarray";
+const npyArray = ndarray(data, shape);
+npyArray.get(10, 15);
+```
+
+## Supported Data Types
+
+The library supports the following NumPy data types (a short dtype-handling example follows the list):
+
+-   `int8`, `uint8`
+-   `int16`, `uint16`
+-   `int32`, `uint32`
+-   `int64`, `uint64` (as BigInt)
+-   `float32`
+-   `float64`
+-   `float16` (converted to float32 by default)
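+
+Because the element type of `data` depends on the file's dtype, downstream code often needs to branch on it. A minimal sketch (the `counts.npy` filename is only an example) showing how `int64`/`uint64` arrive as BigInt-backed arrays while every other dtype is a plain numeric typed array:
+
+```javascript
+import npyjs from "npyjs";
+
+async function inspect(path) {
+    const n = new npyjs();
+    const { data, shape, dtype } = await n.load(path);
+
+    if (dtype === "int64" || dtype === "uint64") {
+        // int64/uint64 come back as BigInt64Array/BigUint64Array, whose elements
+        // are BigInt values; convert explicitly before mixing with plain numbers.
+        console.log("first element:", Number(data[0]));
+    } else {
+        // Every other supported dtype is an ordinary numeric typed array.
+        console.log("first element:", data[0]);
+    }
+    console.log(`shape: ${shape.join(" x ")}, dtype: ${dtype}`);
+}
+
+inspect("counts.npy");
+```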
+
+### Float16 Support
+
+By default, float16 arrays are automatically converted to float32 for compatibility, since JavaScript doesn't natively support float16. You can control this behavior with the constructor options:
+
+```javascript
+// Default behavior - float16 is converted to float32
+const n1 = new npyjs();
+// Keep float16 as raw uint16 values without conversion
+const n2 = new npyjs({ convertFloat16: false });
+```
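+
+When conversion is disabled, `data` for a float16 file is the raw `Uint16Array` of half-precision bit patterns, and the class's own helpers can decode it on demand. A small sketch, where `half.npy` stands in for any float16 file:
+
+```javascript
+const n = new npyjs({ convertFloat16: false });
+const { data, dtype } = await n.load("half.npy"); // dtype === "float16", data is a Uint16Array
+
+// Decode the whole buffer at once...
+const asFloat32 = n.float16ToFloat32Array(data);
+
+// ...or decode individual half-precision values on demand.
+const first = npyjs.float16ToFloat32(data[0]);
+console.log(first, asFloat32[0]); // the two values agree
+```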
+
+Unless otherwise specified, all code inside of this repository is covered under the license in [LICENSE](LICENSE).
+
+Please report bugs or contribute pull-requests on [GitHub](https://github.com/aplbrain/npyjs).
+
+---
+
+<p align="center"><small>Made with ♥ at <a href="http://www.jhuapl.edu/"><img alt="JHU APL" align="center" src="./docs/apl-logo.png" height="23px"></a></small></p>

+ 1 - 0
libs/npyjs/docs/_config.yml

@@ -0,0 +1 @@
+theme: jekyll-theme-cayman

BIN
libs/npyjs/docs/apl-logo.png


+ 49 - 0
libs/npyjs/docs/index.md

@@ -0,0 +1,49 @@
+---
+permalink: /index.html
+---
+
+# npy.js
+
+Read .npy files from [numpy](https://numpy.org/doc/1.18/reference/generated/numpy.save.html) in Node/JS.
+
+## Installation
+Include npy.js in your project directly, or:
+
+```shell
+yarn add npyjs
+# npm i npyjs
+```
+
+## Usage
+
+- Create a new npyjs object.
+```javascript
+import npyjs from "npyjs";
+let n = new npyjs();
+```
+- This object can now be used to load .npy files. The parsed result is returned via a JavaScript callback, so usage looks like this:
+```javascript
+n.load('my-array.npy', ({ data, shape }) => {
+    // `data` is a flat typed array of the raw values
+    // `shape` is an array that holds the numpy-style shape.
+    console.log(`You loaded an array with ${data.length} elements and ${shape.length} dimensions.`);
+});
+```
+
+You can also use this library promise-style:
+
+```javascript
+n.load("test.npy").then(res => {
+    // res has { data, shape, dtype } members.
+});
+```
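+
+Or, equivalently, with `async`/`await`:
+
+```javascript
+const npyArray = await n.load("test.npy");
+// npyArray has { data, shape, dtype } members.
+```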
+
+Unless otherwise specified, all code inside of this repository is covered under the license in [LICENSE](LICENSE).
+
+
+Please report bugs or contribute pull-requests on [GitHub](https://github.com/aplbrain/npyjs).
+
+
+----
+
+<p align="center"><small>Made with ♥ at <a href="http://www.jhuapl.edu/"><img alt="JHU APL" align="center" src="./docs/apl-logo.png" height="23px"></a></small></p>

+ 99 - 0
libs/npyjs/index.d.ts

@@ -0,0 +1,99 @@
+type ValueOf<T> = T[keyof T];
+
+// Add constructor options type
+export interface NpyjsOptions {
+    convertFloat16?: boolean;
+}
+
+export type Dtypes = {
+    "<u1": {
+        name: "uint8";
+        size: 8;
+        arrayConstructor: typeof Uint8Array;
+    };
+    "|u1": {
+        name: "uint8";
+        size: 8;
+        arrayConstructor: typeof Uint8Array;
+    };
+    "<u2": {
+        name: "uint16";
+        size: 16;
+        arrayConstructor: typeof Uint16Array;
+    };
+    "|i1": {
+        name: "int8";
+        size: 8;
+        arrayConstructor: typeof Int8Array;
+    };
+    "<i2": {
+        name: "int16";
+        size: 16;
+        arrayConstructor: typeof Int16Array;
+    };
+    "<u4": {
+        name: "uint32";
+        size: 32;
+        arrayConstructor: typeof Uint32Array;
+    };
+    "<i4": {
+        name: "int32";
+        size: 32;
+        arrayConstructor: typeof Int32Array;
+    };
+    "<u8": {
+        name: "uint64";
+        size: 64;
+        arrayConstructor: typeof BigUint64Array;
+    };
+    "<i8": {
+        name: "int64";
+        size: 64;
+        arrayConstructor: typeof BigInt64Array;
+    };
+    "<f4": {
+        name: "float32";
+        size: 32;
+        arrayConstructor: typeof Float32Array;
+    };
+    "<f8": {
+        name: "float64";
+        size: 64;
+        arrayConstructor: typeof Float64Array;
+    };
+    "<f2": {
+        name: "float16";
+        size: 16;
+        arrayConstructor: typeof Uint16Array;
+        converter?: (array: Uint16Array) => Float32Array;
+    };
+};
+
+export type Parsed = ValueOf<{
+    [K in keyof Dtypes]: {
+        dtype: Dtypes[K]["name"];
+        data: K extends "<f2" ? Float32Array : InstanceType<Dtypes[K]["arrayConstructor"]>;
+        shape: number[];
+        fortranOrder: boolean;
+    };
+}>;
+
+declare class npyjs {
+
+    constructor(opts?: NpyjsOptions);
+
+    dtypes: Dtypes;
+
+    parse(arrayBufferContents: ArrayBuffer): Parsed;
+
+    load(
+        filename: RequestInfo | URL | ArrayBuffer,
+        callback?: (result?: Parsed) => any,
+        fetchArgs?: RequestInit
+    ): Promise<Parsed>;
+
+    float16ToFloat32Array(float16Array: Uint16Array): Float32Array;
+    static float16ToFloat32(float16: number): number;
+}
+
+export default npyjs;

+ 180 - 0
libs/npyjs/index.js

@@ -0,0 +1,180 @@
+import fetch from 'cross-fetch';
+
+class npyjs {
+
+    constructor(opts) {
+        if (opts && !('convertFloat16' in opts)) {
+            console.warn([
+                "npyjs constructor now accepts {convertFloat16?: boolean}.",
+                "For usage, go to https://github.com/jhuapl-boss/npyjs."
+            ].join(" "));
+        }
+
+        this.convertFloat16 = opts?.convertFloat16 ?? true;
+
+        this.dtypes = {
+            "<u1": {
+                name: "uint8",
+                size: 8,
+                arrayConstructor: Uint8Array,
+            },
+            "|u1": {
+                name: "uint8",
+                size: 8,
+                arrayConstructor: Uint8Array,
+            },
+            "<u2": {
+                name: "uint16",
+                size: 16,
+                arrayConstructor: Uint16Array,
+            },
+            "|i1": {
+                name: "int8",
+                size: 8,
+                arrayConstructor: Int8Array,
+            },
+            "<i2": {
+                name: "int16",
+                size: 16,
+                arrayConstructor: Int16Array,
+            },
+            "<u4": {
+                name: "uint32",
+                size: 32,
+                arrayConstructor: Uint32Array,
+            },
+            "<i4": {
+                name: "int32",
+                size: 32,
+                arrayConstructor: Int32Array,
+            },
+            "<u8": {
+                name: "uint64",
+                size: 64,
+                arrayConstructor: BigUint64Array,
+            },
+            "<i8": {
+                name: "int64",
+                size: 64,
+                arrayConstructor: BigInt64Array,
+            },
+            "<f4": {
+                name: "float32",
+                size: 32,
+                arrayConstructor: Float32Array
+            },
+            "<f8": {
+                name: "float64",
+                size: 64,
+                arrayConstructor: Float64Array
+            },
+            "<f2": {
+                name: "float16",
+                size: 16,
+                arrayConstructor: Uint16Array,
+                converter: this.convertFloat16 ? this.float16ToFloat32Array : undefined
+            },
+        };
+    }
+
+    float16ToFloat32Array(float16Array) {
+        const length = float16Array.length;
+        const float32Array = new Float32Array(length);
+        
+        for (let i = 0; i < length; i++) {
+            float32Array[i] = npyjs.float16ToFloat32(float16Array[i]);
+        }
+        
+        return float32Array;
+    }
+
+    static float16ToFloat32(float16) {
+        // Extract the parts of the float16
+        const sign = (float16 >> 15) & 0x1;
+        const exponent = (float16 >> 10) & 0x1f;
+        const fraction = float16 & 0x3ff;
+
+        // Handle special cases
+        if (exponent === 0) {
+            if (fraction === 0) {
+                // Zero
+                return sign ? -0 : 0;
+            }
+            // Denormalized number
+            return (sign ? -1 : 1) * Math.pow(2, -14) * (fraction / 0x400);
+        } else if (exponent === 0x1f) {
+            if (fraction === 0) {
+                // Infinity
+                return sign ? -Infinity : Infinity;
+            }
+            // NaN
+            return NaN;
+        }
+
+        // Normalized number
+        return (sign ? -1 : 1) * Math.pow(2, exponent - 15) * (1 + fraction / 0x400);
+    }
+
+    parse(arrayBufferContents) {
+        // const version = arrayBufferContents.slice(6, 8); // Uint8-encoded
+        // Bytes 8-9 hold HEADER_LEN as a little-endian uint16 (npy format v1).
+        const headerLength = new DataView(arrayBufferContents.slice(8, 10)).getUint16(0, true);
+        const offsetBytes = 10 + headerLength;
+
+        const hcontents = new TextDecoder("utf-8").decode(
+            new Uint8Array(arrayBufferContents.slice(10, 10 + headerLength))
+        );
+        const header = JSON.parse(
+            hcontents
+                .toLowerCase() // True -> true
+                .replace(/'/g, '"')
+                .replace("(", "[")
+                .replace(/,*\),*/g, "]")
+        );
+        const shape = header.shape;
+        const dtype = this.dtypes[header.descr];
+
+        if (!dtype) {
+            console.error(`Unsupported dtype: ${header.descr}`);
+            return null;
+        }
+
+        const nums = new dtype.arrayConstructor(
+            arrayBufferContents,
+            offsetBytes
+        );
+
+        // Convert float16 to float32 if converter exists
+        const data = dtype.converter ? dtype.converter.call(this, nums) : nums;
+
+        return {
+            dtype: dtype.name,
+            data: data,
+            shape,
+            fortranOrder: header.fortran_order
+        };
+    }
+
+    async load(filename, callback, fetchArgs) {
+        /*
+        Loads an array from a stream of bytes.
+        */
+        fetchArgs = fetchArgs || {};
+        let arrayBuf;
+        // If filename is ArrayBuffer
+        if (filename instanceof ArrayBuffer) {
+            arrayBuf = filename;
+        }
+        // If filename is a file path
+        else {
+            const resp = await fetch(filename, { ...fetchArgs });
+            arrayBuf = await resp.arrayBuffer();
+        }
+        const result = this.parse(arrayBuf);
+        if (callback) {
+            return callback(result);
+        }
+        return result;
+    }
+}
+
+export default npyjs;

+ 23 - 0
libs/npyjs/package.json

@@ -0,0 +1,23 @@
+{
+    "name": "npyjs",
+    "version": "0.7.0",
+    "type": "module",
+    "description": "Parse npy files in JS",
+    "exports": "./index.js",
+    "repository": "https://github.com/jhuapl-boss/npyjs.git",
+    "author": "Jordan Matelsky <j6k4m8@gmail.com>",
+    "license": "Apache-2.0",
+    "engines": {
+        "node": "^12.20.0 || >=14.13.1"
+    },
+    "devDependencies": {
+        "mocha": "^10.2.0",
+        "typescript": "4.1.x"
+    },
+    "scripts": {
+        "test": "mocha"
+    },
+    "dependencies": {
+        "cross-fetch": "^3.1.5"
+    }
+}

BIN
libs/npyjs/test/data/10-float16.npy


BIN
libs/npyjs/test/data/10-float32.npy


BIN
libs/npyjs/test/data/10-float64.npy


BIN
libs/npyjs/test/data/10-int16.npy


BIN
libs/npyjs/test/data/10-int64.npy


BIN
libs/npyjs/test/data/10-int8.npy


BIN
libs/npyjs/test/data/100x100x100-float16.npy


BIN
libs/npyjs/test/data/100x100x100-float32.npy


BIN
libs/npyjs/test/data/100x100x100-float64.npy


BIN
libs/npyjs/test/data/100x100x100-int16.npy


BIN
libs/npyjs/test/data/100x100x100-int64.npy


BIN
libs/npyjs/test/data/100x100x100-int8.npy


BIN
libs/npyjs/test/data/4x4x4x4x4-float16.npy


BIN
libs/npyjs/test/data/4x4x4x4x4-float32.npy


BIN
libs/npyjs/test/data/4x4x4x4x4-float64.npy


BIN
libs/npyjs/test/data/4x4x4x4x4-int16.npy


+ 0 - 0
libs/npyjs/test/data/4x4x4x4x4-int64.npy


Some files were not shown because of the large number of changes