mirror of https://github.com/kern/filepizza
Add streamsaver
parent
a0a1d8643c
commit
1ceca18cc0
@ -0,0 +1,168 @@
|
|||||||
|
<!--
|
||||||
|
https://github.com/jimmywarting/StreamSaver.js/blob/master/mitm.html
|
||||||
|
|
||||||
|
mitm.html is the lite "man in the middle"
|
||||||
|
|
||||||
|
This is only meant to signal the opener's messageChannel to
|
||||||
|
the service worker - when that is done this mitm can be closed
|
||||||
|
but it's better to keep it alive since this also stops the sw
|
||||||
|
from restarting
|
||||||
|
|
||||||
|
The service worker is capable of intercepting all request and fork their
|
||||||
|
own "fake" response - wish we are going to craft
|
||||||
|
when the worker then receives a stream then the worker will tell the opener
|
||||||
|
to open up a link that will start the download
|
||||||
|
-->
|
||||||
|
<script>
|
||||||
|
// This will prevent the sw from restarting
|
||||||
|
// Keeps the service worker from being terminated by the browser while a
// download is in flight. Self-disarms after the first call so the interval
// is only ever installed once.
let keepAlive = () => {
  keepAlive = () => {} // subsequent calls become no-ops

  // The ping endpoint lives next to this page; the sw answers it.
  const ping = location.href.slice(0, location.href.lastIndexOf('/')) + '/ping'

  const interval = setInterval(() => {
    if (sw) {
      // Fast path: once we hold a worker reference, message it directly.
      sw.postMessage('ping')
    } else {
      // Fallback: a fetch intercepted by the sw also keeps it alive.
      // Stop pinging if the endpoint ever fails (sw gone / page stale).
      fetch(ping).then(res => {
        if (!res.ok) clearInterval(interval)
        return res.text()
      })
    }
  }, 10000)
}
|
||||||
|
|
||||||
|
// The 'message' event listener must be installed immediately so the opener
// never has to guess (via a random timeout) when this popup is ready — it can
// listen for the ready event instead. Because the Service Worker registration
// is asynchronous, messages arriving before it completes are queued here and
// replayed once registration is done.
let messages = []
window.onmessage = evt => messages.push(evt)

// Active service worker (set by registerWorker) and its registration scope.
let sw = null
let scope = ''
|
||||||
|
|
||||||
|
/**
 * Registers (or reuses) the StreamSaver service worker and resolves once it
 * is active. Side effects: sets the module-level `sw` and `scope`.
 * @returns {Promise} resolves when an active worker is available
 */
function registerWorker() {
  return navigator.serviceWorker.getRegistration('./').then(swReg => {
    // Reuse an existing registration when one is present.
    return swReg || navigator.serviceWorker.register('sw.js', { scope: './' })
  }).then(swReg => {
    const swRegTmp = swReg.installing || swReg.waiting

    scope = swReg.scope

    // Already active? Cache and resolve immediately. Otherwise wait for the
    // freshly installed/waiting worker to reach the 'activated' state.
    // (The original assigned `fn` without declaring it — an implicit global.)
    return (sw = swReg.active) || new Promise(resolve => {
      const fn = () => {
        if (swRegTmp.state === 'activated') {
          swRegTmp.removeEventListener('statechange', fn)
          sw = swReg.active
          resolve()
        }
      }
      swRegTmp.addEventListener('statechange', fn)
    })
  })
}
|
||||||
|
|
||||||
|
// Now that we have the Service Worker registered we can process messages
|
||||||
|
/**
 * Handles one download request from the opener page: validates the message,
 * normalises legacy (pre-v2.0.0) fields, derives the download URL under the
 * service worker's scope, and forwards everything — plus the transferred
 * MessagePort — to the service worker.
 * @param {MessageEvent} event - message posted by the page using StreamSaver
 */
function onMessage (event) {
  let { data, ports, origin } = event

  // It's important to have a messageChannel, don't want to interfere
  // with other simultaneous downloads
  if (!ports || !ports.length) {
    throw new TypeError("[StreamSaver] You didn't send a messageChannel")
  }

  if (typeof data !== 'object') {
    throw new TypeError("[StreamSaver] You didn't send a object")
  }

  // the default public service worker for StreamSaver is shared among others,
  // so all download links need to be prefixed by origin to avoid conflicts
  data.origin = origin

  // if we ever (in some future version of streamsaver) would like to
  // redirect back to the page of who initiated a http request
  data.referrer = data.referrer || document.referrer || origin

  // pass along version for possible backwards compatibility in sw.js
  data.streamSaverVersion = new URLSearchParams(location.search).get('version')

  if (data.streamSaverVersion === '1.2.0') {
    console.warn('[StreamSaver] please update streamsaver')
  }

  /** @since v2.0.0 */
  if (!data.headers) {
    console.warn("[StreamSaver] pass `data.headers` that you would like to pass along to the service worker\nit should be a 2D array or a key/val object that fetch's Headers api accepts")
  } else {
    // validate the headers eagerly here —
    // this should throw a TypeError if they are malformed
    new Headers(data.headers)
  }

  /** @since v2.0.0 */
  if (typeof data.filename === 'string') {
    console.warn("[StreamSaver] You shouldn't send `data.filename` anymore. It should be included in the Content-Disposition header option")
    // Do what the File constructor does with fileNames: '/' is not allowed
    data.filename = data.filename.replace(/\//g, ':')
  }

  /** @since v2.0.0 */
  if (data.size) {
    console.warn("[StreamSaver] You shouldn't send `data.size` anymore. It should be included in the content-length header option")
  }

  /** @since v2.0.0 */
  if (data.readableStream) {
    console.warn("[StreamSaver] You should send the readableStream in the messageChannel, not throught mitm")
  }

  /** @since v2.0.0 */
  if (!data.pathname) {
    console.warn("[StreamSaver] Please send `data.pathname` (eg: /pictures/summer.jpg)")
    data.pathname = Math.random().toString().slice(-6) + '/' + data.filename
  }

  // remove all leading slashes
  data.pathname = data.pathname.replace(/^\/+/g, '')

  // remove protocol from the origin (scheme-less host for the URL prefix)
  let org = origin.replace(/(^\w+:|^)\/\//, '')

  // set the absolute pathname to the download url.
  data.url = new URL(`${scope + org}/${data.pathname}`).toString()

  // Guard against a pathname that escapes the scoped prefix (e.g. '..').
  if (!data.url.startsWith(`${scope + org}/`)) {
    throw new TypeError('[StreamSaver] bad `data.pathname`')
  }

  // This sends the message data as well as transferring
  // messageChannel.port2 to the service worker. The service worker can
  // then use the transferred port to reply via postMessage(), which
  // will in turn trigger the onmessage handler on messageChannel.port1.

  const transferable = data.readableStream
    ? [ ports[0], data.readableStream ]
    : [ ports[0] ]

  // Without a transferable stream the sw must be kept alive by pinging
  // for the whole duration of the download.
  if (!(data.readableStream || data.transferringReadable)) {
    keepAlive()
  }

  return sw.postMessage(data, transferable)
}
|
||||||
|
|
||||||
|
if (window.opener) {
  // The opener can't listen to our onload event, so we signal readiness
  // explicitly (telling them that we are ready to accept postMessage's).
  window.opener.postMessage('StreamSaver::loadedPopup', '*')
}

if (navigator.serviceWorker) {
  // Once the sw is registered, switch to the real handler and replay any
  // messages that arrived while registration was in flight.
  registerWorker().then(() => {
    window.onmessage = onMessage
    messages.forEach(window.onmessage)
  })
} else {
  // No serviceWorker API here — FF can still ping the sw with fetch from a
  // secure hidden iframe. Shouldn't really be possible otherwise?
  keepAlive()
}
|
||||||
|
|
||||||
|
</script>
|
||||||
@ -0,0 +1,143 @@
|
|||||||
|
// https://github.com/jimmywarting/StreamSaver.js/blob/master/sw.js
|
||||||
|
|
||||||
|
/* global self ReadableStream Response */
|
||||||
|
|
||||||
|
// Take control as soon as possible: skip the 'waiting' phase on install…
self.addEventListener('install', () => {
  self.skipWaiting()
})

// …and claim all open clients on activation.
self.addEventListener('activate', (event) => {
  event.waitUntil(self.clients.claim())
})

// Pending downloads keyed by download URL: url -> [stream, data, port].
const map = new Map()
|
||||||
|
|
||||||
|
// This should be called once per download
|
||||||
|
// Each event has a dataChannel that the data will be piped through
|
||||||
|
// This should be called once per download.
// Each event has a dataChannel that the data will be piped through.
self.onmessage = (event) => {
  // The page sends a heartbeat every x seconds to keep the
  // service worker alive when a transferable stream is not sent.
  if (event.data === 'ping') {
    return
  }

  const data = event.data
  // v2 clients send a pre-built scoped url; otherwise build a random one.
  const downloadUrl =
    data.url ||
    self.registration.scope +
      Math.random() +
      '/' +
      (typeof data === 'string' ? data : data.filename)
  const port = event.ports[0]
  const metadata = new Array(3) // [stream, data, port]

  metadata[1] = data
  metadata[2] = port

  // Note to self:
  // old streamsaver v1.2.0 might still use `readableStream`...
  // but v2.0.0 will always transfer the stream through MessageChannel #94
  if (event.data.readableStream) {
    metadata[0] = event.data.readableStream
  } else if (event.data.transferringReadable) {
    // The stream itself arrives in a follow-up message on the port.
    port.onmessage = (evt) => {
      port.onmessage = null
      metadata[0] = evt.data.readableStream
    }
  } else {
    // No transferable-stream support: synthesize one fed by port messages.
    metadata[0] = createStream(port)
  }

  map.set(downloadUrl, metadata)
  port.postMessage({ download: downloadUrl })
}
|
||||||
|
|
||||||
|
/**
 * Builds a ReadableStream fed by chunks posted over a MessagePort.
 * The page writes chunks via the port; the sentinel values 'end' and
 * 'abort' close or error the stream respectively.
 * @param {MessagePort} port - channel the page writes download data to
 * @returns {ReadableStream} stream suitable as a Response body
 */
function createStream(port) {
  // ReadableStream is only supported by chrome 52
  return new ReadableStream({
    start(controller) {
      // When we receive data on the messageChannel, we write
      port.onmessage = ({ data }) => {
        if (data === 'end') {
          return controller.close()
        }

        if (data === 'abort') {
          // Error with an Error instance (not a bare string) so consumers
          // get a proper type/stack when the failure propagates.
          controller.error(new Error('Aborted the download'))
          return
        }

        controller.enqueue(data)
      }
    },
    cancel() {
      console.log('user aborted')
    },
  })
}
|
||||||
|
|
||||||
|
// Serves the hijacked download URLs registered by onmessage. Requests not
// found in `map` fall through to the network (no respondWith call).
self.onfetch = (event) => {
  const url = event.request.url

  // keep-alive endpoint — this only works for Firefox
  if (url.endsWith('/ping')) {
    return event.respondWith(new Response('pong'))
  }

  const hijacke = map.get(url)

  if (!hijacke) return null

  const [stream, data, port] = hijacke

  // Each download URL is single-use.
  map.delete(url)

  // Not comfortable letting any user control all headers
  // so we only copy over the length & disposition
  const responseHeaders = new Headers({
    'Content-Type': 'application/octet-stream; charset=utf-8',

    // To be on the safe side, The link can be opened in a iframe.
    // but octet-stream should stop it.
    'Content-Security-Policy': "default-src 'none'",
    'X-Content-Security-Policy': "default-src 'none'",
    'X-WebKit-CSP': "default-src 'none'",
    'X-XSS-Protection': '1; mode=block',
  })

  // Headers the client asked us to forward (validated in the mitm page).
  let headers = new Headers(data.headers || {})

  if (headers.has('Content-Length')) {
    responseHeaders.set('Content-Length', headers.get('Content-Length'))
  }

  if (headers.has('Content-Disposition')) {
    responseHeaders.set(
      'Content-Disposition',
      headers.get('Content-Disposition'),
    )
  }

  // Legacy (pre-v2) fields: data.filename and data.size should not be used
  // anymore — kept for backwards compatibility with old clients.
  if (data.size) {
    console.warn('Depricated')
    responseHeaders.set('Content-Length', data.size)
  }

  let fileName = typeof data === 'string' ? data : data.filename
  if (fileName) {
    console.warn('Depricated')
    // Make filename RFC5987 compatible
    fileName = encodeURIComponent(fileName)
      .replace(/['()]/g, escape)
      .replace(/\*/g, '%2A')
    responseHeaders.set(
      'Content-Disposition',
      "attachment; filename*=UTF-8''" + fileName,
    )
  }

  event.respondWith(new Response(stream, { headers: responseHeaders }))

  // Let the page know the browser has started consuming the stream.
  port.postMessage({ debug: 'Download started' })
}
|
||||||
@ -0,0 +1,235 @@
|
|||||||
|
// Based on https://github.com/jimmywarting/StreamSaver.js/blob/master/examples/zip-stream.js
|
||||||
|
|
||||||
|
/**
 * Streaming CRC-32 (reflected polynomial 0xEDB88320), as used by the ZIP
 * format. Feed chunks with append() and read the digest via get().
 */
class Crc32 {
  // 256-entry lookup table, built once for the class. (The original built it
  // as an instance-field initializer, recomputing it for every Crc32 created.)
  static table = (() => {
    const table = []
    for (let i = 0; i < 256; i++) {
      let t = i
      for (let j = 0; j < 8; j++) {
        t = t & 1 ? (t >>> 1) ^ 0xedb88320 : t >>> 1
      }
      table[i] = t
    }
    return table
  })()

  // Running CRC, stored pre-inversion (standard CRC-32 init value is ~0).
  crc = -1

  /**
   * Folds a chunk of bytes into the running CRC.
   * @param {Uint8Array} data - bytes to append
   */
  append(data) {
    let crc = this.crc | 0
    const table = Crc32.table
    for (let offset = 0, len = data.length | 0; offset < len; offset++) {
      crc = (crc >>> 8) ^ table[(crc ^ data[offset]) & 0xff]
    }
    this.crc = crc
  }

  /**
   * @returns {number} final CRC-32 as a signed 32-bit integer; apply `>>> 0`
   *   for the unsigned value.
   */
  get() {
    return ~this.crc
  }
}
|
||||||
|
|
||||||
|
// Allocates a zero-filled buffer of `byteLength` bytes and exposes it both as
// a Uint8Array (`array`) and as a DataView (`view`) over the same memory.
const getDataHelper = (byteLength) => {
  const bytes = new Uint8Array(byteLength)
  const view = new DataView(bytes.buffer)
  return { array: bytes, view }
}
|
||||||
|
|
||||||
|
// Pulls one chunk from the active entry's reader. A data chunk is folded into
// the entry's CRC, counted, and forwarded to the output controller; end of
// input triggers the entry's data-descriptor footer instead. Returns the
// read promise so the stream's pull() gets back-pressure.
const pump = (zipObj) =>
  zipObj.reader.read().then(({ done, value }) => {
    if (done) return zipObj.writeFooter()
    zipObj.crc.append(value)
    // Entries are stored uncompressed, so both counters advance equally.
    zipObj.uncompressedLength += value.length
    zipObj.compressedLength += value.length
    zipObj.ctrl.enqueue(value)
  })
|
||||||
|
|
||||||
|
/**
 * Builds a ReadableStream of ZIP file bytes. `underlyingSource` supplies
 * file-like objects ({ name, stream()?, directory?, comment?, lastModified? })
 * via the zipWriter handed to its start/pull callbacks; entries are written
 * sequentially, stored (uncompressed), with data-descriptor footers, and the
 * central directory is emitted when close() is called and all entries drain.
 * @param {{start?: Function, pull?: Function}} underlyingSource
 * @returns {ReadableStream} the ZIP byte stream
 */
export function createZipStream(underlyingSource) {
  const files = Object.create(null) // name -> per-entry state object
  const filenames = [] // entry order (drives both writing and the directory)
  const encoder = new TextEncoder()
  let offset = 0 // bytes emitted so far = next entry's local-header offset
  let activeZipIndex = 0
  let ctrl // the ReadableStream controller, captured in start()
  let activeZipObject, closed

  // Advances to the next queued entry; once the queue is drained and close()
  // was requested, emits the central directory.
  function next() {
    activeZipIndex++
    activeZipObject = files[filenames[activeZipIndex]]
    // eslint-disable-next-line @typescript-eslint/no-use-before-define
    if (activeZipObject) processNextChunk()
    // eslint-disable-next-line @typescript-eslint/no-use-before-define
    else if (closed) closeZip()
  }

  // Writer interface handed to underlyingSource.start/pull.
  const zipWriter = {
    enqueue(fileLike) {
      if (closed)
        throw new TypeError(
          'Cannot enqueue a chunk into a readable stream that is closed or has been requested to be closed',
        )

      let name = fileLike.name.trim()
      const date = new Date(
        typeof fileLike.lastModified === 'undefined'
          ? Date.now()
          : fileLike.lastModified,
      )

      // ZIP convention: directory entries carry a trailing slash.
      if (fileLike.directory && !name.endsWith('/')) name += '/'
      if (files[name]) throw new Error('File already exists.')

      const nameBuf = encoder.encode(name)
      filenames.push(name)

      const zipObject = (files[name] = {
        level: 0, // 0 = stored / no compression
        ctrl,
        directory: !!fileLike.directory,
        nameBuf,
        comment: encoder.encode(fileLike.comment || ''),
        compressedLength: 0,
        uncompressedLength: 0,
        // Emits the entry's local file header (signature 0x504b0304 = PK\3\4).
        // `header` (26 bytes) is kept around so closeZip can copy the shared
        // fields into the central directory record later.
        writeHeader() {
          const header = getDataHelper(26)
          const data = getDataHelper(30 + nameBuf.length)

          zipObject.offset = offset
          zipObject.header = header
          if (zipObject.level !== 0 && !zipObject.directory) {
            header.view.setUint16(4, 0x0800)
          }
          // version-needed + flags (bit 3: sizes/CRC follow in the footer).
          header.view.setUint32(0, 0x14000808)
          // DOS time: hours|minutes|seconds/2 packed into 16 bits.
          header.view.setUint16(
            6,
            (((date.getHours() << 6) | date.getMinutes()) << 5) |
              (date.getSeconds() / 2),
            true,
          )
          // DOS date: (year-1980)|month|day packed into 16 bits.
          header.view.setUint16(
            8,
            ((((date.getFullYear() - 1980) << 4) | (date.getMonth() + 1)) <<
              5) |
              date.getDate(),
            true,
          )
          header.view.setUint16(22, nameBuf.length, true)
          data.view.setUint32(0, 0x504b0304)
          data.array.set(header.array, 4)
          data.array.set(nameBuf, 30)
          offset += data.array.length
          ctrl.enqueue(data.array)
        },
        // Emits the entry's data descriptor (signature 0x504b0708) and
        // back-fills CRC/sizes into the retained header copy, then advances.
        writeFooter() {
          const footer = getDataHelper(16)
          footer.view.setUint32(0, 0x504b0708)

          if (zipObject.crc) {
            zipObject.header.view.setUint32(10, zipObject.crc.get(), true)
            zipObject.header.view.setUint32(
              14,
              zipObject.compressedLength,
              true,
            )
            zipObject.header.view.setUint32(
              18,
              zipObject.uncompressedLength,
              true,
            )
            footer.view.setUint32(4, zipObject.crc.get(), true)
            footer.view.setUint32(8, zipObject.compressedLength, true)
            footer.view.setUint32(12, zipObject.uncompressedLength, true)
          }

          ctrl.enqueue(footer.array)
          offset += zipObject.compressedLength + 16
          next()
        },
        fileLike,
      })

      // Nothing in flight? This entry becomes active immediately.
      if (!activeZipObject) {
        activeZipObject = zipObject
        // eslint-disable-next-line @typescript-eslint/no-use-before-define
        processNextChunk()
      }
    },
    close() {
      if (closed)
        throw new TypeError(
          'Cannot close a readable stream that has already been requested to be closed',
        )
      // eslint-disable-next-line @typescript-eslint/no-use-before-define
      if (!activeZipObject) closeZip()
      closed = true
    },
  }

  // Emits the central directory (one 0x504b0102 record per entry) followed by
  // the end-of-central-directory record (0x504b0506), then closes the stream.
  function closeZip() {
    let length = 0
    let index = 0
    let indexFilename, file
    // First pass: total central-directory size (46 fixed bytes per record).
    for (indexFilename = 0; indexFilename < filenames.length; indexFilename++) {
      file = files[filenames[indexFilename]]
      length += 46 + file.nameBuf.length + file.comment.length
    }
    const data = getDataHelper(length + 22)
    // Second pass: write each record, reusing the entry's back-filled header.
    for (indexFilename = 0; indexFilename < filenames.length; indexFilename++) {
      file = files[filenames[indexFilename]]
      data.view.setUint32(index, 0x504b0102)
      data.view.setUint16(index + 4, 0x1400)
      data.array.set(file.header.array, index + 6)
      data.view.setUint16(index + 32, file.comment.length, true)
      if (file.directory) {
        // external attributes: MS-DOS directory bit.
        data.view.setUint8(index + 38, 0x10)
      }
      data.view.setUint32(index + 42, file.offset, true)
      data.array.set(file.nameBuf, index + 46)
      data.array.set(file.comment, index + 46 + file.nameBuf.length)
      index += 46 + file.nameBuf.length + file.comment.length
    }
    data.view.setUint32(index, 0x504b0506)
    data.view.setUint16(index + 8, filenames.length, true)
    data.view.setUint16(index + 10, filenames.length, true)
    data.view.setUint32(index + 12, length, true)
    data.view.setUint32(index + 16, offset, true)
    ctrl.enqueue(data.array)
    ctrl.close()
  }

  // Drives the active entry one step: directories are header+footer only;
  // an entry with a reader gets pumped; a fresh file entry is opened here.
  function processNextChunk() {
    if (!activeZipObject) return
    if (activeZipObject.directory)
      return activeZipObject.writeFooter(activeZipObject.writeHeader())
    if (activeZipObject.reader) return pump(activeZipObject)
    if (activeZipObject.fileLike.stream) {
      activeZipObject.crc = new Crc32()
      activeZipObject.reader = activeZipObject.fileLike.stream().getReader()
      activeZipObject.writeHeader()
    } else next()
  }
  return new ReadableStream({
    start: (c) => {
      ctrl = c
      underlyingSource.start &&
        Promise.resolve(underlyingSource.start(zipWriter))
    },
    pull() {
      return (
        processNextChunk() ||
        (underlyingSource.pull &&
          Promise.resolve(underlyingSource.pull(zipWriter)))
      )
    },
  })
}
|
||||||
Loading…
Reference in New Issue