diff --git a/package.json b/package.json
index 56efa33..7cc13b1 100644
--- a/package.json
+++ b/package.json
@@ -32,9 +32,11 @@
"react-device-detect": "^1.15.0",
"react-dom": "^16.13.1",
"react-qr": "0.0.2",
+ "streamsaver": "^2.0.5",
"styled-components": "^5.2.0",
"twilio": "^2.9.1",
"use-http": "^1.0.16",
+ "web-streams-polyfill": "^3.0.1",
"webrtcsupport": "^2.2.0",
"xkcd-password": "^1.2.0"
},
diff --git a/public/stream.html b/public/stream.html
new file mode 100644
index 0000000..8138731
--- /dev/null
+++ b/public/stream.html
@@ -0,0 +1,168 @@
+
+
diff --git a/public/sw.js b/public/sw.js
new file mode 100644
index 0000000..e04c70d
--- /dev/null
+++ b/public/sw.js
@@ -0,0 +1,143 @@
+// https://github.com/jimmywarting/StreamSaver.js/blob/master/sw.js
+
+/* global self ReadableStream Response */
+
+self.addEventListener('install', () => {
+ self.skipWaiting()
+})
+
+self.addEventListener('activate', (event) => {
+ event.waitUntil(self.clients.claim())
+})
+
+const map = new Map()
+
+// This should be called once per download
+// Each event has a dataChannel that the data will be piped through
+self.onmessage = (event) => {
+ // We send a heartbeat every x seconds to keep the
+ // service worker alive if a transferable stream is not sent
+ if (event.data === 'ping') {
+ return
+ }
+
+ const data = event.data
+ const downloadUrl =
+ data.url ||
+ self.registration.scope +
+ Math.random() +
+ '/' +
+ (typeof data === 'string' ? data : data.filename)
+ const port = event.ports[0]
+ const metadata = new Array(3) // [stream, data, port]
+
+ metadata[1] = data
+ metadata[2] = port
+
+ // Note to self:
+ // old streamsaver v1.2.0 might still use `readableStream`...
+ // but v2.0.0 will always transfer the stream through MessageChannel #94
+ if (event.data.readableStream) {
+ metadata[0] = event.data.readableStream
+ } else if (event.data.transferringReadable) {
+ port.onmessage = (evt) => {
+ port.onmessage = null
+ metadata[0] = evt.data.readableStream
+ }
+ } else {
+ metadata[0] = createStream(port)
+ }
+
+ map.set(downloadUrl, metadata)
+ port.postMessage({ download: downloadUrl })
+}
+
+function createStream(port) {
+ // ReadableStream is only supported by chrome 52
+ return new ReadableStream({
+ start(controller) {
+ // When we receive data on the messageChannel, we write
+ port.onmessage = ({ data }) => {
+ if (data === 'end') {
+ return controller.close()
+ }
+
+ if (data === 'abort') {
+ controller.error('Aborted the download')
+ return
+ }
+
+ controller.enqueue(data)
+ }
+ },
+ cancel() {
+ console.log('user aborted')
+ },
+ })
+}
+
+self.onfetch = (event) => {
+ const url = event.request.url
+
+ // this only works for Firefox
+ if (url.endsWith('/ping')) {
+ return event.respondWith(new Response('pong'))
+ }
+
+ const hijacke = map.get(url)
+
+ if (!hijacke) return null
+
+ const [stream, data, port] = hijacke
+
+ map.delete(url)
+
+ // Not comfortable letting any user control all headers
+ // so we only copy over the length & disposition
+ const responseHeaders = new Headers({
+ 'Content-Type': 'application/octet-stream; charset=utf-8',
+
+ // To be on the safe side: the link can be opened in an iframe,
+ // but the octet-stream content type should stop that.
+ 'Content-Security-Policy': "default-src 'none'",
+ 'X-Content-Security-Policy': "default-src 'none'",
+ 'X-WebKit-CSP': "default-src 'none'",
+ 'X-XSS-Protection': '1; mode=block',
+ })
+
+ let headers = new Headers(data.headers || {})
+
+ if (headers.has('Content-Length')) {
+ responseHeaders.set('Content-Length', headers.get('Content-Length'))
+ }
+
+ if (headers.has('Content-Disposition')) {
+ responseHeaders.set(
+ 'Content-Disposition',
+ headers.get('Content-Disposition'),
+ )
+ }
+
+ // data, data.filename and size should not be used anymore
+ if (data.size) {
+ console.warn('Depricated')
+ responseHeaders.set('Content-Length', data.size)
+ }
+
+ let fileName = typeof data === 'string' ? data : data.filename
+ if (fileName) {
+ console.warn('Depricated')
+ // Make filename RFC5987 compatible
+ fileName = encodeURIComponent(fileName)
+ .replace(/['()]/g, escape)
+ .replace(/\*/g, '%2A')
+ responseHeaders.set(
+ 'Content-Disposition',
+ "attachment; filename*=UTF-8''" + fileName,
+ )
+ }
+
+ event.respondWith(new Response(stream, { headers: responseHeaders }))
+
+ port.postMessage({ debug: 'Download started' })
+}
diff --git a/src/components/Downloader.tsx b/src/components/Downloader.tsx
index 0ab7b3d..62d9693 100644
--- a/src/components/Downloader.tsx
+++ b/src/components/Downloader.tsx
@@ -10,6 +10,69 @@ import {
} from 'react-device-detect'
import * as t from 'io-ts'
import { decodeMessage, Message, MessageType } from '../messages'
+import { createZipStream } from '../zip-stream'
+
+const baseURL = process.env.NEXT_PUBLIC_BASE_URL ?? 'http://localhost:3000'
+
+// eslint-disable-next-line @typescript-eslint/no-var-requires
+if (process.browser) require('web-streams-polyfill/ponyfill')
+
+// eslint-disable-next-line @typescript-eslint/no-var-requires
+const streamSaver = process.browser ? require('streamsaver') : null
+if (process.browser) {
+ streamSaver.mitm = baseURL + '/stream.html'
+}
+
+function getZipFilename(): string {
+ return `filepizza-download-${Date.now()}.zip`
+}
+
+type DownloadFileStream = {
+ name: string
+ size: number
+ stream: () => ReadableStream
+}
+
+export async function streamDownloadSingleFile(
+ file: DownloadFileStream,
+): Promise {
+ const fileStream = streamSaver.createWriteStream(file.name, {
+ size: file.size,
+ })
+
+ const writer = fileStream.getWriter()
+ const reader = file.stream().getReader()
+
+ const pump = async () => {
+ const res = await reader.read()
+ return res.done ? writer.close() : writer.write(res.value).then(pump)
+ }
+ await pump()
+}
+
+export function streamDownloadMultipleFiles(
+ files: Array,
+): Promise {
+ const filename = getZipFilename()
+ const totalSize = files.reduce((acc, file) => acc + file.size, 0)
+ const fileStream = streamSaver.createWriteStream(filename, {
+ size: totalSize,
+ })
+
+ const readableZipStream = createZipStream({
+ start(ctrl) {
+ for (const file of files) {
+ ctrl.enqueue(file)
+ }
+ ctrl.close()
+ },
+ async pull(_ctrl) {
+ // Gets executed every time zip-stream asks for more data
+ },
+ })
+
+ return readableZipStream.pipeTo(fileStream)
+}
export default function Downloader({
uploaderPeerID,
@@ -21,6 +84,7 @@ export default function Downloader({
const [password, setPassword] = useState('')
const [shouldAttemptConnection, setShouldAttemptConnection] = useState(false)
const [open, setOpen] = useState(false)
+ const [downloading, setDownloading] = useState(false)
const [errorMessage, setErrorMessage] = useState(null)
useEffect(() => {
@@ -36,7 +100,7 @@ export default function Downloader({
setOpen(true)
const request: t.TypeOf = {
- type: MessageType.Start,
+ type: MessageType.RequestInfo,
browserName: browserName,
browserVersion: browserVersion,
osName: osName,
@@ -70,6 +134,7 @@ export default function Downloader({
conn.on('close', () => {
setOpen(false)
+ setDownloading(false)
setShouldAttemptConnection(false)
})
@@ -85,24 +150,46 @@ export default function Downloader({
[],
)
- const handleSubmit = useCallback((ev) => {
+ const handleSubmitPassword = useCallback((ev) => {
ev.preventDefault()
setShouldAttemptConnection(true)
}, [])
- if (open) {
+ const handleStartDownload = useCallback(() => {
+ setDownloading(true)
+
+ // TODO(@kern): Download each file as a ReadableStream
+ // const blob = new Blob(['support blobs too'])
+ // const file = {
+ // name: 'blob-example.txt',
+ // size: 12,
+ // stream: () => blob.stream(),
+ // }
+ // streamDownloadSingleFile(file)
+ // streamDownloadMultipleFiles([file])
+ }, [])
+
+ if (downloading) {
return Downloading
}
+ if (open) {
+ return (
+
+
+
+ )
+ }
+
if (shouldAttemptConnection) {
return Loading...
}
return (
-
)
}
diff --git a/src/zip-stream.ts b/src/zip-stream.ts
new file mode 100644
index 0000000..86464af
--- /dev/null
+++ b/src/zip-stream.ts
@@ -0,0 +1,235 @@
+// Based on https://github.com/jimmywarting/StreamSaver.js/blob/master/examples/zip-stream.js
+
+class Crc32 {
+ crc: number
+
+ constructor() {
+ this.crc = -1
+ }
+
+ table = (() => {
+ let i
+ let j
+ let t
+ const table = []
+ for (i = 0; i < 256; i++) {
+ t = i
+ for (j = 0; j < 8; j++) {
+ t = t & 1 ? (t >>> 1) ^ 0xedb88320 : t >>> 1
+ }
+ table[i] = t
+ }
+ return table
+ })()
+
+ append(data) {
+ let crc = this.crc | 0
+ const table = this.table
+ for (let offset = 0, len = data.length | 0; offset < len; offset++) {
+ crc = (crc >>> 8) ^ table[(crc ^ data[offset]) & 0xff]
+ }
+ this.crc = crc
+ }
+
+ get() {
+ return ~this.crc
+ }
+}
+
+const getDataHelper = (byteLength) => {
+ const uint8 = new Uint8Array(byteLength)
+ return {
+ array: uint8,
+ view: new DataView(uint8.buffer),
+ }
+}
+
+const pump = (zipObj) =>
+ zipObj.reader.read().then((chunk) => {
+ if (chunk.done) return zipObj.writeFooter()
+ const outputData = chunk.value
+ zipObj.crc.append(outputData)
+ zipObj.uncompressedLength += outputData.length
+ zipObj.compressedLength += outputData.length
+ zipObj.ctrl.enqueue(outputData)
+ })
+
+export function createZipStream(underlyingSource) {
+ const files = Object.create(null)
+ const filenames = []
+ const encoder = new TextEncoder()
+ let offset = 0
+ let activeZipIndex = 0
+ let ctrl
+ let activeZipObject, closed
+
+ function next() {
+ activeZipIndex++
+ activeZipObject = files[filenames[activeZipIndex]]
+ // eslint-disable-next-line @typescript-eslint/no-use-before-define
+ if (activeZipObject) processNextChunk()
+ // eslint-disable-next-line @typescript-eslint/no-use-before-define
+ else if (closed) closeZip()
+ }
+
+ const zipWriter = {
+ enqueue(fileLike) {
+ if (closed)
+ throw new TypeError(
+ 'Cannot enqueue a chunk into a readable stream that is closed or has been requested to be closed',
+ )
+
+ let name = fileLike.name.trim()
+ const date = new Date(
+ typeof fileLike.lastModified === 'undefined'
+ ? Date.now()
+ : fileLike.lastModified,
+ )
+
+ if (fileLike.directory && !name.endsWith('/')) name += '/'
+ if (files[name]) throw new Error('File already exists.')
+
+ const nameBuf = encoder.encode(name)
+ filenames.push(name)
+
+ const zipObject = (files[name] = {
+ level: 0,
+ ctrl,
+ directory: !!fileLike.directory,
+ nameBuf,
+ comment: encoder.encode(fileLike.comment || ''),
+ compressedLength: 0,
+ uncompressedLength: 0,
+ writeHeader() {
+ const header = getDataHelper(26)
+ const data = getDataHelper(30 + nameBuf.length)
+
+ zipObject.offset = offset
+ zipObject.header = header
+ if (zipObject.level !== 0 && !zipObject.directory) {
+ header.view.setUint16(4, 0x0800)
+ }
+ header.view.setUint32(0, 0x14000808)
+ header.view.setUint16(
+ 6,
+ (((date.getHours() << 6) | date.getMinutes()) << 5) |
+ (date.getSeconds() / 2),
+ true,
+ )
+ header.view.setUint16(
+ 8,
+ ((((date.getFullYear() - 1980) << 4) | (date.getMonth() + 1)) <<
+ 5) |
+ date.getDate(),
+ true,
+ )
+ header.view.setUint16(22, nameBuf.length, true)
+ data.view.setUint32(0, 0x504b0304)
+ data.array.set(header.array, 4)
+ data.array.set(nameBuf, 30)
+ offset += data.array.length
+ ctrl.enqueue(data.array)
+ },
+ writeFooter() {
+ const footer = getDataHelper(16)
+ footer.view.setUint32(0, 0x504b0708)
+
+ if (zipObject.crc) {
+ zipObject.header.view.setUint32(10, zipObject.crc.get(), true)
+ zipObject.header.view.setUint32(
+ 14,
+ zipObject.compressedLength,
+ true,
+ )
+ zipObject.header.view.setUint32(
+ 18,
+ zipObject.uncompressedLength,
+ true,
+ )
+ footer.view.setUint32(4, zipObject.crc.get(), true)
+ footer.view.setUint32(8, zipObject.compressedLength, true)
+ footer.view.setUint32(12, zipObject.uncompressedLength, true)
+ }
+
+ ctrl.enqueue(footer.array)
+ offset += zipObject.compressedLength + 16
+ next()
+ },
+ fileLike,
+ })
+
+ if (!activeZipObject) {
+ activeZipObject = zipObject
+ // eslint-disable-next-line @typescript-eslint/no-use-before-define
+ processNextChunk()
+ }
+ },
+ close() {
+ if (closed)
+ throw new TypeError(
+ 'Cannot close a readable stream that has already been requested to be closed',
+ )
+ // eslint-disable-next-line @typescript-eslint/no-use-before-define
+ if (!activeZipObject) closeZip()
+ closed = true
+ },
+ }
+
+ function closeZip() {
+ let length = 0
+ let index = 0
+ let indexFilename, file
+ for (indexFilename = 0; indexFilename < filenames.length; indexFilename++) {
+ file = files[filenames[indexFilename]]
+ length += 46 + file.nameBuf.length + file.comment.length
+ }
+ const data = getDataHelper(length + 22)
+ for (indexFilename = 0; indexFilename < filenames.length; indexFilename++) {
+ file = files[filenames[indexFilename]]
+ data.view.setUint32(index, 0x504b0102)
+ data.view.setUint16(index + 4, 0x1400)
+ data.array.set(file.header.array, index + 6)
+ data.view.setUint16(index + 32, file.comment.length, true)
+ if (file.directory) {
+ data.view.setUint8(index + 38, 0x10)
+ }
+ data.view.setUint32(index + 42, file.offset, true)
+ data.array.set(file.nameBuf, index + 46)
+ data.array.set(file.comment, index + 46 + file.nameBuf.length)
+ index += 46 + file.nameBuf.length + file.comment.length
+ }
+ data.view.setUint32(index, 0x504b0506)
+ data.view.setUint16(index + 8, filenames.length, true)
+ data.view.setUint16(index + 10, filenames.length, true)
+ data.view.setUint32(index + 12, length, true)
+ data.view.setUint32(index + 16, offset, true)
+ ctrl.enqueue(data.array)
+ ctrl.close()
+ }
+
+ function processNextChunk() {
+ if (!activeZipObject) return
+ if (activeZipObject.directory)
+ return activeZipObject.writeFooter(activeZipObject.writeHeader())
+ if (activeZipObject.reader) return pump(activeZipObject)
+ if (activeZipObject.fileLike.stream) {
+ activeZipObject.crc = new Crc32()
+ activeZipObject.reader = activeZipObject.fileLike.stream().getReader()
+ activeZipObject.writeHeader()
+ } else next()
+ }
+ return new ReadableStream({
+ start: (c) => {
+ ctrl = c
+ underlyingSource.start &&
+ Promise.resolve(underlyingSource.start(zipWriter))
+ },
+ pull() {
+ return (
+ processNextChunk() ||
+ (underlyingSource.pull &&
+ Promise.resolve(underlyingSource.pull(zipWriter)))
+ )
+ },
+ })
+}
diff --git a/yarn.lock b/yarn.lock
index 87722fc..5017264 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -7056,6 +7056,11 @@ stream-shift@^1.0.0:
resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.1.tgz#d7088281559ab2778424279b0877da3c392d5a3d"
integrity sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==
+streamsaver@^2.0.5:
+ version "2.0.5"
+ resolved "https://registry.yarnpkg.com/streamsaver/-/streamsaver-2.0.5.tgz#3212f0e908fcece5b3a65591094475cf87850d00"
+ integrity sha512-KIWtBvi8A6FiFZGNSyuIZRZM6C8AvnWTiCx/TYa7so420vC5sQwcBKkdqInuGWoWMfeWy/P+/cRqMtWVf4RW9w==
+
string-argv@0.3.1:
version "0.3.1"
resolved "https://registry.yarnpkg.com/string-argv/-/string-argv-0.3.1.tgz#95e2fbec0427ae19184935f816d74aaa4c5c19da"
@@ -7816,6 +7821,11 @@ watchpack@^1.7.4:
chokidar "^3.4.1"
watchpack-chokidar2 "^2.0.0"
+web-streams-polyfill@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-3.0.1.tgz#1f836eea307e8f4af15758ee473c7af755eb879e"
+ integrity sha512-M+EmTdszMWINywOZaqpZ6VIEDUmNpRaTOuizF0ZKPjSDC8paMRe/jBBwFv0Yeyn5WYnM5pMqMQa82vpaE+IJRw==
+
web-vitals@0.2.1:
version "0.2.1"
resolved "https://registry.yarnpkg.com/web-vitals/-/web-vitals-0.2.1.tgz#60782fa690243fe35613759a0c26431f57ba7b2d"