Add streamsaver

pull/152/head
Alex Kern 5 years ago
parent a0a1d8643c
commit 1ceca18cc0
No known key found for this signature in database
GPG Key ID: F3141D5EDF48F89F

@ -32,9 +32,11 @@
"react-device-detect": "^1.15.0",
"react-dom": "^16.13.1",
"react-qr": "0.0.2",
"streamsaver": "^2.0.5",
"styled-components": "^5.2.0",
"twilio": "^2.9.1",
"use-http": "^1.0.16",
"web-streams-polyfill": "^3.0.1",
"webrtcsupport": "^2.2.0",
"xkcd-password": "^1.2.0"
},

@ -0,0 +1,168 @@
<!--
https://github.com/jimmywarting/StreamSaver.js/blob/master/mitm.html
mitm.html is the lite "man in the middle".
Its only job is to hand the opener's messageChannel over to the
service worker - once that is done this mitm page could be closed,
but it's better to keep it alive, since that also stops the service
worker from restarting.
The service worker is capable of intercepting all requests and forging its
own "fake" responses - which we are going to craft.
When the worker then receives a stream, it will tell the opener
to open up a link that will start the download.
-->
<script>
// Keeps the service worker from being shut down while a download runs.
// Replaces itself with a no-op on first call, so the interval is set at most once.
let keepAlive = () => {
  keepAlive = () => {}
  var pingUrl = location.href.substr(0, location.href.lastIndexOf('/')) + '/ping'
  var interval = setInterval(() => {
    if (sw) {
      // Worker reference is available: ping it over postMessage.
      sw.postMessage('ping')
    } else {
      // No worker handle yet: ping over HTTP; stop once the endpoint is gone.
      fetch(pingUrl).then(res => {
        if (!res.ok) clearInterval(interval)
        return res.text()
      })
    }
  }, 10000)
}
// A "message" listener is the first thing we need to set up.
// We don't want the opener to rely on a random timeout - instead it can
// listen for the ready event. But since we must wait for the Service
// Worker registration first, incoming messages are queued until then.
let messages = []
window.onmessage = evt => messages.push(evt)
// Active service worker (set once registration completes) and its scope URL.
let sw = null
let scope = ''
/**
 * Registers (or reuses) the service worker for this scope and resolves
 * once it is active. Side effects: sets the module-level `sw` and `scope`.
 * Fix: the original assigned the statechange listener to an implicit
 * global (`fn = () => {...}`), which leaks a global and throws in strict mode.
 */
function registerWorker() {
  return navigator.serviceWorker.getRegistration('./').then(swReg => {
    // Reuse an existing registration when possible.
    return swReg || navigator.serviceWorker.register('sw.js', { scope: './' })
  }).then(swReg => {
    const swRegTmp = swReg.installing || swReg.waiting
    scope = swReg.scope
    // Already active: resolve immediately. Otherwise wait for activation.
    return (sw = swReg.active) || new Promise(resolve => {
      const onStateChange = () => {
        if (swRegTmp.state === 'activated') {
          swRegTmp.removeEventListener('statechange', onStateChange)
          sw = swReg.active
          resolve()
        }
      }
      swRegTmp.addEventListener('statechange', onStateChange)
    })
  })
}
// Now that we have the Service Worker registered we can process messages.
// Validates a download request posted by the opener, normalizes its fields,
// and forwards it (together with the MessagePort) to the service worker.
// Throws TypeError on malformed requests.
function onMessage (event) {
  let { data, ports, origin } = event
  // It's important to have a messageChannel, don't want to interfere
  // with other simultaneous downloads
  if (!ports || !ports.length) {
    throw new TypeError("[StreamSaver] You didn't send a messageChannel")
  }
  if (typeof data !== 'object') {
    throw new TypeError("[StreamSaver] You didn't send a object")
  }
  // the default public service worker for StreamSaver is shared among others,
  // so all download links need to be prefixed to avoid any conflict
  data.origin = origin
  // if we ever (in some future version of streamsaver) would like to
  // redirect back to the page that initiated the http request
  data.referrer = data.referrer || document.referrer || origin
  // pass along version for possible backwards compatibility in sw.js
  data.streamSaverVersion = new URLSearchParams(location.search).get('version')
  if (data.streamSaverVersion === '1.2.0') {
    console.warn('[StreamSaver] please update streamsaver')
  }
  /** @since v2.0.0 */
  if (!data.headers) {
    console.warn("[StreamSaver] pass `data.headers` that you would like to pass along to the service worker\nit should be a 2D array or a key/val object that fetch's Headers api accepts")
  } else {
    // test if it's correct
    // should throw a TypeError if not
    new Headers(data.headers)
  }
  /** @since v2.0.0 */
  if (typeof data.filename === 'string') {
    console.warn("[StreamSaver] You shouldn't send `data.filename` anymore. It should be included in the Content-Disposition header option")
    // Do what the File constructor does with filenames
    data.filename = data.filename.replace(/\//g, ':')
  }
  /** @since v2.0.0 */
  if (data.size) {
    console.warn("[StreamSaver] You shouldn't send `data.size` anymore. It should be included in the content-length header option")
  }
  /** @since v2.0.0 */
  if (data.readableStream) {
    console.warn("[StreamSaver] You should send the readableStream in the messageChannel, not throught mitm")
  }
  /** @since v2.0.0 */
  if (!data.pathname) {
    console.warn("[StreamSaver] Please send `data.pathname` (eg: /pictures/summer.jpg)")
    data.pathname = Math.random().toString().slice(-6) + '/' + data.filename
  }
  // remove all leading slashes
  data.pathname = data.pathname.replace(/^\/+/g, '')
  // remove protocol
  let org = origin.replace(/(^\w+:|^)\/\//, '')
  // set the absolute pathname to the download url
  data.url = new URL(`${scope + org}/${data.pathname}`).toString()
  // sanity check: the computed URL must stay inside the worker's scope
  if (!data.url.startsWith(`${scope + org}/`)) {
    throw new TypeError('[StreamSaver] bad `data.pathname`')
  }
  // This sends the message data as well as transferring
  // messageChannel.port2 to the service worker. The service worker can
  // then use the transferred port to reply via postMessage(), which
  // will in turn trigger the onmessage handler on messageChannel.port1.
  const transferable = data.readableStream
    ? [ ports[0], data.readableStream ]
    : [ ports[0] ]
  // When no stream is transferred, the worker is fed through the port and
  // must be kept alive with pings for the duration of the download.
  if (!(data.readableStream || data.transferringReadable)) {
    keepAlive()
  }
  return sw.postMessage(data, transferable)
}
if (window.opener) {
  // The opener can't listen to our onload event, so we need to help them out!
  // (telling them that we are ready to accept postMessage's)
  window.opener.postMessage('StreamSaver::loadedPopup', '*')
}
if (navigator.serviceWorker) {
  // Register the worker first, then install the real handler and replay
  // any messages that were queued while registration was in flight.
  registerWorker().then(() => {
    window.onmessage = onMessage
    messages.forEach(window.onmessage)
  })
} else {
  // FF can ping sw with fetch from a secure hidden iframe
  // shouldn't really be possible?
  keepAlive()
}
</script>

@ -0,0 +1,143 @@
// Service worker adapted from:
// https://github.com/jimmywarting/StreamSaver.js/blob/master/sw.js
/* global self ReadableStream Response */
// Activate a newly installed worker immediately instead of waiting for
// existing clients to close.
self.addEventListener('install', () => {
  self.skipWaiting()
})
// Take control of all open clients as soon as the worker activates.
self.addEventListener('activate', (event) => {
  event.waitUntil(self.clients.claim())
})
// download URL -> [stream, data, port]; filled by onmessage, drained by onfetch.
const map = new Map()
// This should be called once per download
// Each event has a dataChannel that the data will be piped through
// Registers one download per message: records its stream + reply port under
// a unique URL, then tells the page which URL to navigate to.
self.onmessage = (event) => {
  // Heartbeat pings (sent so the worker is not killed mid-download when no
  // transferable stream is in use) carry no payload - just ignore them.
  if (event.data === 'ping') {
    return
  }
  const data = event.data
  const port = event.ports[0]
  const fallbackName = typeof data === 'string' ? data : data.filename
  const downloadUrl =
    data.url || self.registration.scope + Math.random() + '/' + fallbackName
  // metadata layout: [stream, data, port]
  const metadata = new Array(3)
  metadata[1] = data
  metadata[2] = port
  // Note to self:
  // old streamsaver v1.2.0 might still use `readableStream`...
  // but v2.0.0 will always transfer the stream through the MessageChannel (#94)
  if (data.readableStream) {
    metadata[0] = data.readableStream
  } else if (data.transferringReadable) {
    // The stream arrives in a follow-up message over the port.
    port.onmessage = (evt) => {
      port.onmessage = null
      metadata[0] = evt.data.readableStream
    }
  } else {
    // Fallback: build a stream fed chunk-by-chunk through the port.
    metadata[0] = createStream(port)
  }
  map.set(downloadUrl, metadata)
  port.postMessage({ download: downloadUrl })
}
/**
 * Builds a ReadableStream fed chunk-by-chunk over `port`.
 * The page posts binary chunks, then the string 'end' to finish or
 * 'abort' to fail the stream.
 * Fix: error the stream with a real Error instead of a bare string, so
 * consumers get a proper `instanceof Error` / stack when a read rejects.
 */
function createStream(port) {
  // ReadableStream is only supported by chrome 52
  return new ReadableStream({
    start(controller) {
      // When we receive data on the messageChannel, we write
      port.onmessage = ({ data }) => {
        if (data === 'end') {
          return controller.close()
        }
        if (data === 'abort') {
          controller.error(new Error('Aborted the download'))
          return
        }
        controller.enqueue(data)
      }
    },
    cancel() {
      // The consumer cancelled the stream (user aborted the download).
      console.log('user aborted')
    },
  })
}
// Intercepts navigation to a registered download URL and answers it with the
// recorded stream so the browser saves it to disk.
// Fixes: local `hijacke` -> `hijacked`; misspelled 'Depricated' warnings.
self.onfetch = (event) => {
  const url = event.request.url
  // Keep-alive endpoint; this only works for Firefox.
  if (url.endsWith('/ping')) {
    return event.respondWith(new Response('pong'))
  }
  const hijacked = map.get(url)
  if (!hijacked) return null
  const [stream, data, port] = hijacked
  // Each entry is single-use: drop it so the URL can't be replayed.
  map.delete(url)
  // Not comfortable letting any user control all headers,
  // so we only copy over the length & disposition.
  const responseHeaders = new Headers({
    'Content-Type': 'application/octet-stream; charset=utf-8',
    // To be on the safe side, the link can be opened in an iframe,
    // but octet-stream (plus a deny-all CSP) should stop it.
    'Content-Security-Policy': "default-src 'none'",
    'X-Content-Security-Policy': "default-src 'none'",
    'X-WebKit-CSP': "default-src 'none'",
    'X-XSS-Protection': '1; mode=block',
  })
  const headers = new Headers(data.headers || {})
  if (headers.has('Content-Length')) {
    responseHeaders.set('Content-Length', headers.get('Content-Length'))
  }
  if (headers.has('Content-Disposition')) {
    responseHeaders.set(
      'Content-Disposition',
      headers.get('Content-Disposition'),
    )
  }
  // data, data.filename and data.size should not be used anymore (pre-v2 API).
  if (data.size) {
    console.warn('Deprecated')
    responseHeaders.set('Content-Length', data.size)
  }
  let fileName = typeof data === 'string' ? data : data.filename
  if (fileName) {
    console.warn('Deprecated')
    // Make filename RFC 5987 compatible
    fileName = encodeURIComponent(fileName)
      .replace(/['()]/g, escape)
      .replace(/\*/g, '%2A')
    responseHeaders.set(
      'Content-Disposition',
      "attachment; filename*=UTF-8''" + fileName,
    )
  }
  event.respondWith(new Response(stream, { headers: responseHeaders }))
  port.postMessage({ debug: 'Download started' })
}

@ -10,6 +10,69 @@ import {
} from 'react-device-detect'
import * as t from 'io-ts'
import { decodeMessage, Message, MessageType } from '../messages'
import { createZipStream } from '../zip-stream'
// Base URL used to serve the StreamSaver man-in-the-middle page.
const baseURL = process.env.NEXT_PUBLIC_BASE_URL ?? 'http://localhost:3000'
// StreamSaver and the streams polyfill touch browser-only globals, so they
// are only loaded client-side (`process.browser` is set by Next.js).
// eslint-disable-next-line @typescript-eslint/no-var-requires
if (process.browser) require('web-streams-polyfill/ponyfill')
// eslint-disable-next-line @typescript-eslint/no-var-requires
const streamSaver = process.browser ? require('streamsaver') : null
if (process.browser) {
  // Serve our own mitm page instead of StreamSaver's default hosted one.
  streamSaver.mitm = baseURL + '/stream.html'
}
// Name the zip with the current timestamp so repeated downloads don't collide.
function getZipFilename(): string {
  const timestamp = Date.now()
  return `filepizza-download-${timestamp}.zip`
}
// Minimal file-like shape accepted by the download helpers below: a name,
// a total size in bytes, and a factory producing a fresh ReadableStream.
type DownloadFileStream = {
  name: string
  size: number
  stream: () => ReadableStream
}
// Streams one file straight to disk via StreamSaver, so large files are
// never buffered fully in memory. Resolves once the write stream is closed.
export async function streamDownloadSingleFile(
  file: DownloadFileStream,
): Promise<void> {
  const writer = streamSaver
    .createWriteStream(file.name, { size: file.size })
    .getWriter()
  const reader = file.stream().getReader()
  // Copy chunk-by-chunk until the source reports completion.
  const pump = async (): Promise<void> => {
    const { done, value } = await reader.read()
    if (done) {
      return writer.close()
    }
    await writer.write(value)
    return pump()
  }
  await pump()
}
export function streamDownloadMultipleFiles(
files: Array<DownloadFileStream>,
): Promise<void> {
const filename = getZipFilename()
const totalSize = files.reduce((acc, file) => acc + file.size, 0)
const fileStream = streamSaver.createWriteStream(filename, {
size: totalSize,
})
const readableZipStream = createZipStream({
start(ctrl) {
for (const file of files) {
ctrl.enqueue(file)
}
ctrl.close()
},
async pull(_ctrl) {
// Gets executed everytime zip-stream asks for more data
},
})
return readableZipStream.pipeTo(fileStream)
}
export default function Downloader({
uploaderPeerID,
@ -21,6 +84,7 @@ export default function Downloader({
const [password, setPassword] = useState('')
const [shouldAttemptConnection, setShouldAttemptConnection] = useState(false)
const [open, setOpen] = useState(false)
const [downloading, setDownloading] = useState(false)
const [errorMessage, setErrorMessage] = useState<string | null>(null)
useEffect(() => {
@ -36,7 +100,7 @@ export default function Downloader({
setOpen(true)
const request: t.TypeOf<typeof Message> = {
type: MessageType.Start,
type: MessageType.RequestInfo,
browserName: browserName,
browserVersion: browserVersion,
osName: osName,
@ -70,6 +134,7 @@ export default function Downloader({
conn.on('close', () => {
setOpen(false)
setDownloading(false)
setShouldAttemptConnection(false)
})
@ -85,24 +150,46 @@ export default function Downloader({
[],
)
const handleSubmit = useCallback((ev) => {
const handleSubmitPassword = useCallback((ev) => {
ev.preventDefault()
setShouldAttemptConnection(true)
}, [])
if (open) {
const handleStartDownload = useCallback(() => {
setDownloading(true)
// TODO(@kern): Download each file as a ReadableStream
// const blob = new Blob(['support blobs too'])
// const file = {
// name: 'blob-example.txt',
// size: 12,
// stream: () => blob.stream(),
// }
// streamDownloadSingleFile(file)
// streamDownloadMultipleFiles([file])
}, [])
if (downloading) {
return <div>Downloading</div>
}
if (open) {
return (
<div>
<button onClick={handleStartDownload}>Download</button>
</div>
)
}
if (shouldAttemptConnection) {
return <div>Loading...</div>
}
return (
<form action="#" method="post" onSubmit={handleSubmit}>
<form action="#" method="post" onSubmit={handleSubmitPassword}>
{errorMessage && <div style={{ color: 'red' }}>{errorMessage}</div>}
<input type="password" value={password} onChange={handleChangePassword} />
<button>Start</button>
<button>Unlock</button>
</form>
)
}

@ -0,0 +1,235 @@
// Based on https://github.com/jimmywarting/StreamSaver.js/blob/master/examples/zip-stream.js
/**
 * Streaming CRC-32 (IEEE polynomial 0xEDB88320), used for zip entry checksums.
 * Feed bytes with append(); read the final checksum with get().
 * Fix: the 256-entry lookup table was an instance field, rebuilt on every
 * construction (once per file added to the archive) - it is now shared via
 * a static field.
 */
class Crc32 {
  // Shared lookup table, built once for the class.
  static table = (() => {
    const table = []
    for (let i = 0; i < 256; i++) {
      let t = i
      for (let j = 0; j < 8; j++) {
        t = t & 1 ? (t >>> 1) ^ 0xedb88320 : t >>> 1
      }
      table[i] = t
    }
    return table
  })()

  // Running CRC state, kept bit-inverted per CRC-32 convention.
  crc = -1

  /** Fold a chunk of bytes (Uint8Array or number[]) into the checksum. */
  append(data) {
    let crc = this.crc | 0
    const table = Crc32.table
    for (let offset = 0, len = data.length | 0; offset < len; offset++) {
      crc = (crc >>> 8) ^ table[(crc ^ data[offset]) & 0xff]
    }
    this.crc = crc
  }

  /** Final checksum as a signed 32-bit int; use `>>> 0` for the unsigned value. */
  get() {
    return ~this.crc
  }
}
// Allocate a byte buffer together with a DataView over it, so zip structures
// can be written with explicit endianness and enqueued as a Uint8Array.
const getDataHelper = (byteLength) => {
  const bytes = new Uint8Array(byteLength)
  const view = new DataView(bytes.buffer)
  return { array: bytes, view }
}
// Read one chunk from the active entry's reader: forward it into the zip
// output (updating CRC and byte counters) or, at end-of-stream, emit the
// entry's footer.
const pump = async (zipObj) => {
  const chunk = await zipObj.reader.read()
  if (chunk.done) return zipObj.writeFooter()
  const bytes = chunk.value
  zipObj.crc.append(bytes)
  zipObj.uncompressedLength += bytes.length
  // Entries are stored (level 0), so compressed size tracks uncompressed.
  zipObj.compressedLength += bytes.length
  zipObj.ctrl.enqueue(bytes)
}
// Builds a ReadableStream of zip-archive bytes (stored/uncompressed entries).
// `underlyingSource.start(zipWriter)` enqueues file-like objects (see
// DownloadFileStream); each entry's bytes are pulled lazily from its own
// stream, checksummed, and framed with local headers, data-descriptor
// footers, and a trailing central directory.
export function createZipStream(underlyingSource) {
  // filename -> zip-entry state; null prototype avoids key collisions.
  const files = Object.create(null)
  const filenames = []
  const encoder = new TextEncoder()
  // Running byte offset into the archive output.
  let offset = 0
  // Index into `filenames` of the entry currently being written.
  let activeZipIndex = 0
  // Controller of the output ReadableStream (set in start()).
  let ctrl
  let activeZipObject, closed
  // Advance to the next queued entry, or finish the archive when done.
  function next() {
    activeZipIndex++
    activeZipObject = files[filenames[activeZipIndex]]
    // eslint-disable-next-line @typescript-eslint/no-use-before-define
    if (activeZipObject) processNextChunk()
    // eslint-disable-next-line @typescript-eslint/no-use-before-define
    else if (closed) closeZip()
  }
  const zipWriter = {
    // Queue one file-like object as a zip entry.
    enqueue(fileLike) {
      if (closed)
        throw new TypeError(
          'Cannot enqueue a chunk into a readable stream that is closed or has been requested to be closed',
        )
      let name = fileLike.name.trim()
      const date = new Date(
        typeof fileLike.lastModified === 'undefined'
          ? Date.now()
          : fileLike.lastModified,
      )
      // Directory entries must carry a trailing slash in the zip format.
      if (fileLike.directory && !name.endsWith('/')) name += '/'
      if (files[name]) throw new Error('File already exists.')
      const nameBuf = encoder.encode(name)
      filenames.push(name)
      const zipObject = (files[name] = {
        level: 0,
        ctrl,
        directory: !!fileLike.directory,
        nameBuf,
        comment: encoder.encode(fileLike.comment || ''),
        compressedLength: 0,
        uncompressedLength: 0,
        // Emit the local file header (signature 'PK\3\4').
        writeHeader() {
          const header = getDataHelper(26)
          const data = getDataHelper(30 + nameBuf.length)
          zipObject.offset = offset
          zipObject.header = header
          if (zipObject.level !== 0 && !zipObject.directory) {
            // Flag bit for compression settings (unused: entries are stored).
            header.view.setUint16(4, 0x0800)
          }
          // Version needed + general-purpose flags (data descriptor follows).
          header.view.setUint32(0, 0x14000808)
          // DOS time: hhhhh mmmmmm sssss (seconds stored in 2s units).
          header.view.setUint16(
            6,
            (((date.getHours() << 6) | date.getMinutes()) << 5) |
              (date.getSeconds() / 2),
            true,
          )
          // DOS date: (year-1980) month day.
          header.view.setUint16(
            8,
            ((((date.getFullYear() - 1980) << 4) | (date.getMonth() + 1)) <<
              5) |
              date.getDate(),
            true,
          )
          header.view.setUint16(22, nameBuf.length, true)
          // Local file header signature 'PK\3\4'.
          data.view.setUint32(0, 0x504b0304)
          data.array.set(header.array, 4)
          data.array.set(nameBuf, 30)
          offset += data.array.length
          ctrl.enqueue(data.array)
        },
        // Emit the data-descriptor footer ('PK\7\8') and patch the saved
        // header copy with the now-known CRC and sizes.
        writeFooter() {
          const footer = getDataHelper(16)
          footer.view.setUint32(0, 0x504b0708)
          if (zipObject.crc) {
            zipObject.header.view.setUint32(10, zipObject.crc.get(), true)
            zipObject.header.view.setUint32(
              14,
              zipObject.compressedLength,
              true,
            )
            zipObject.header.view.setUint32(
              18,
              zipObject.uncompressedLength,
              true,
            )
            footer.view.setUint32(4, zipObject.crc.get(), true)
            footer.view.setUint32(8, zipObject.compressedLength, true)
            footer.view.setUint32(12, zipObject.uncompressedLength, true)
          }
          ctrl.enqueue(footer.array)
          offset += zipObject.compressedLength + 16
          next()
        },
        fileLike,
      })
      // First entry: start writing immediately.
      if (!activeZipObject) {
        activeZipObject = zipObject
        // eslint-disable-next-line @typescript-eslint/no-use-before-define
        processNextChunk()
      }
    },
    // No further entries will be enqueued; finish once the queue drains.
    close() {
      if (closed)
        throw new TypeError(
          'Cannot close a readable stream that has already been requested to be closed',
        )
      // eslint-disable-next-line @typescript-eslint/no-use-before-define
      if (!activeZipObject) closeZip()
      closed = true
    },
  }
  // Emit the central directory ('PK\1\2' records) and the end-of-central-
  // directory record ('PK\5\6'), then close the output stream.
  function closeZip() {
    let length = 0
    let index = 0
    let indexFilename, file
    // First pass: compute the central directory's total size.
    for (indexFilename = 0; indexFilename < filenames.length; indexFilename++) {
      file = files[filenames[indexFilename]]
      length += 46 + file.nameBuf.length + file.comment.length
    }
    const data = getDataHelper(length + 22)
    // Second pass: write one central-directory record per entry.
    for (indexFilename = 0; indexFilename < filenames.length; indexFilename++) {
      file = files[filenames[indexFilename]]
      data.view.setUint32(index, 0x504b0102)
      data.view.setUint16(index + 4, 0x1400)
      // Reuse the patched local header fields (CRC, sizes, name length).
      data.array.set(file.header.array, index + 6)
      data.view.setUint16(index + 32, file.comment.length, true)
      if (file.directory) {
        // External attribute: directory bit.
        data.view.setUint8(index + 38, 0x10)
      }
      data.view.setUint32(index + 42, file.offset, true)
      data.array.set(file.nameBuf, index + 46)
      data.array.set(file.comment, index + 46 + file.nameBuf.length)
      index += 46 + file.nameBuf.length + file.comment.length
    }
    // End-of-central-directory record.
    data.view.setUint32(index, 0x504b0506)
    data.view.setUint16(index + 8, filenames.length, true)
    data.view.setUint16(index + 10, filenames.length, true)
    data.view.setUint32(index + 12, length, true)
    data.view.setUint32(index + 16, offset, true)
    ctrl.enqueue(data.array)
    ctrl.close()
  }
  // Drive the active entry: header first, then pump its stream chunks,
  // or write a bare header+footer for directory entries.
  function processNextChunk() {
    if (!activeZipObject) return
    if (activeZipObject.directory)
      return activeZipObject.writeFooter(activeZipObject.writeHeader())
    if (activeZipObject.reader) return pump(activeZipObject)
    if (activeZipObject.fileLike.stream) {
      activeZipObject.crc = new Crc32()
      activeZipObject.reader = activeZipObject.fileLike.stream().getReader()
      activeZipObject.writeHeader()
    } else next()
  }
  return new ReadableStream({
    start: (c) => {
      ctrl = c
      underlyingSource.start &&
        Promise.resolve(underlyingSource.start(zipWriter))
    },
    pull() {
      return (
        processNextChunk() ||
        (underlyingSource.pull &&
          Promise.resolve(underlyingSource.pull(zipWriter)))
      )
    },
  })
}

@ -7056,6 +7056,11 @@ stream-shift@^1.0.0:
resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.1.tgz#d7088281559ab2778424279b0877da3c392d5a3d"
integrity sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==
streamsaver@^2.0.5:
version "2.0.5"
resolved "https://registry.yarnpkg.com/streamsaver/-/streamsaver-2.0.5.tgz#3212f0e908fcece5b3a65591094475cf87850d00"
integrity sha512-KIWtBvi8A6FiFZGNSyuIZRZM6C8AvnWTiCx/TYa7so420vC5sQwcBKkdqInuGWoWMfeWy/P+/cRqMtWVf4RW9w==
string-argv@0.3.1:
version "0.3.1"
resolved "https://registry.yarnpkg.com/string-argv/-/string-argv-0.3.1.tgz#95e2fbec0427ae19184935f816d74aaa4c5c19da"
@ -7816,6 +7821,11 @@ watchpack@^1.7.4:
chokidar "^3.4.1"
watchpack-chokidar2 "^2.0.0"
web-streams-polyfill@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-3.0.1.tgz#1f836eea307e8f4af15758ee473c7af755eb879e"
integrity sha512-M+EmTdszMWINywOZaqpZ6VIEDUmNpRaTOuizF0ZKPjSDC8paMRe/jBBwFv0Yeyn5WYnM5pMqMQa82vpaE+IJRw==
web-vitals@0.2.1:
version "0.2.1"
resolved "https://registry.yarnpkg.com/web-vitals/-/web-vitals-0.2.1.tgz#60782fa690243fe35613759a0c26431f57ba7b2d"

Loading…
Cancel
Save