diff --git a/src/components/popups/avatar.ts b/src/components/popups/avatar.ts
index 3324afc4..2e95dcb0 100644
--- a/src/components/popups/avatar.ts
+++ b/src/components/popups/avatar.ts
@@ -9,6 +9,7 @@ import resizeableImage from "../../lib/cropper";
 import PopupElement from ".";
 import { ripple } from "../ripple";
 import { _i18n } from "../../lib/langPack";
+import { readBlobAsDataURL } from "../../helpers/blob";
 
 export default class PopupAvatar extends PopupElement {
   private cropContainer: HTMLElement;
@@ -49,11 +50,8 @@ export default class PopupAvatar extends PopupElement {
       if(!file) {
         return;
       }
-
-      const reader = new FileReader();
-      reader.onload = (e) => {
-        const contents = e.target.result as string;
-
+
+      readBlobAsDataURL(file).then(contents => {
         this.image = new Image();
         this.cropContainer.append(this.image);
         this.image.src = contents;
@@ -67,9 +65,7 @@
           this.cropper = resizeableImage(this.image, this.canvas);
           this.input.value = '';
         };
-      };
-
-      reader.readAsDataURL(file);
+      });
     }, false);
 
     this.btnSubmit = document.createElement('button');
diff --git a/src/helpers/blob.ts b/src/helpers/blob.ts
index 62ac80d2..e38c260f 100644
--- a/src/helpers/blob.ts
+++ b/src/helpers/blob.ts
@@ -9,16 +9,32 @@
  * https://github.com/zhukov/webogram/blob/master/LICENSE
  */
 
-export const readBlobAsText = (blob: Blob) => {
-  return new Promise<string>(resolve => {
+export function readBlobAs(blob: Blob, method: 'readAsText'): Promise<string>;
+export function readBlobAs(blob: Blob, method: 'readAsDataURL'): Promise<string>;
+export function readBlobAs(blob: Blob, method: 'readAsArrayBuffer'): Promise<ArrayBuffer>;
+export function readBlobAs(blob: Blob, method: 'readAsArrayBuffer' | 'readAsText' | 'readAsDataURL'): Promise<any> {
+  return new Promise<any>((resolve) => {
     const reader = new FileReader();
-    reader.addEventListener('loadend', (e) => {
-      // @ts-ignore
-      resolve(e.srcElement.result);
-    });
-    reader.readAsText(blob);
+    reader.addEventListener('loadend', (e) => resolve(e.target.result));
+    reader[method](blob);
   });
-};
+}
+
+export function readBlobAsText(blob: Blob) {
+  return readBlobAs(blob, 'readAsText');
+}
+
+export function readBlobAsDataURL(blob: Blob) {
+  return readBlobAs(blob, 'readAsDataURL');
+}
+
+export function readBlobAsArrayBuffer(blob: Blob) {
+  return readBlobAs(blob, 'readAsArrayBuffer');
+}
+
+export function readBlobAsUint8Array(blob: Blob) {
+  return readBlobAsArrayBuffer(blob).then(buffer => new Uint8Array(buffer));
+}
 
 export function blobConstruct(blobParts: any, mimeType: string = ''): Blob {
   let blob;
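[Review note, not part of the patch] The readBlobAs() overloads above give every FileReader mode a typed, promise-based wrapper. A minimal usage sketch, assuming the caller imports from src/helpers/blob; the two functions below are illustrative, only the helpers themselves come from the patch:

import { readBlobAsDataURL, readBlobAsUint8Array } from '../helpers/blob';

// Show a user-picked image without wiring FileReader by hand (what avatar.ts now does).
async function previewFile(file: File, img: HTMLImageElement) {
  img.src = await readBlobAsDataURL(file); // Promise<string> per the overload above
}

// Raw bytes of any Blob, e.g. before writing or uploading it in parts.
function blobBytes(blob: Blob): Promise<Uint8Array> {
  return readBlobAsUint8Array(blob);
}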
diff --git a/src/lib/cacheStorage.ts b/src/lib/cacheStorage.ts
index 804ea046..6bfdba60 100644
--- a/src/lib/cacheStorage.ts
+++ b/src/lib/cacheStorage.ts
@@ -10,16 +10,17 @@
 import FileManager from './filemanager';
 //import { MOUNT_CLASS_TO } from './mtproto/mtproto_config';
 //import { logger } from './polyfill';
 
+export type CacheStorageDbName = 'cachedFiles' | 'cachedStreamChunks' | 'cachedAssets';
+
 export default class CacheStorageController {
   private static STORAGES: CacheStorageController[] = [];
-  //public dbName = 'cachedFiles';
   private openDbPromise: Promise<Cache>;
   private useStorage = true;
 
   //private log: ReturnType<typeof logger> = logger('CS');
 
-  constructor(private dbName: string) {
+  constructor(private dbName: CacheStorageDbName) {
     if(Modes.test) {
       this.dbName += '_test';
     }
@@ -33,71 +34,38 @@ export default class CacheStorageController {
   }
 
   private openDatabase(): Promise<Cache> {
-    if(this.openDbPromise) {
-      return this.openDbPromise;
-    }
-
-    return this.openDbPromise = caches.open(this.dbName);
+    return this.openDbPromise ?? (this.openDbPromise = caches.open(this.dbName));
   }
 
   public delete(entryName: string) {
-    return this.timeoutOperation((cache) => {
-      return cache.delete('/' + entryName);
-    });
+    return this.timeoutOperation((cache) => cache.delete('/' + entryName));
   }
 
   public deleteAll() {
     return caches.delete(this.dbName);
   }
 
-  public save(entryName: string, response: Response) {
-    if(!this.useStorage) return Promise.reject('STORAGE_OFFLINE');
-
-    return this.timeoutOperation((cache) => {
-      return cache.put('/' + entryName, response);
-    });
+  public get(entryName: string) {
+    return this.timeoutOperation((cache) => cache.match('/' + entryName));
   }
 
-  public saveFile(fileName: string, blob: Blob | Uint8Array) {
-    if(!this.useStorage) return Promise.reject('STORAGE_OFFLINE');
-
-    //return Promise.resolve(blobConstruct([blob]));
-    if(!(blob instanceof Blob)) {
-      blob = blobConstruct(blob) as Blob;
-    }
-
-    const response = new Response(blob, {
-      headers: {
-        'Content-Length': '' + blob.size
-      }
-    });
-
-    return this.save(fileName, response).then(() => {
-      return blob as Blob;
-    });
+  public save(entryName: string, response: Response) {
+    return this.timeoutOperation((cache) => cache.put('/' + entryName, response));
   }
 
-  /* public getBlobSize(blob: any) {
-    return blob.size || blob.byteLength || blob.length;
-  } */
-
   public getFile(fileName: string, method: 'blob' | 'json' | 'text' = 'blob'): Promise<any> {
-    if(!this.useStorage) return Promise.reject('STORAGE_OFFLINE');
-
     /* if(method === 'blob') {
       return Promise.reject();
     } */
 
     // const str = `get fileName: ${fileName}`;
     // console.time(str);
-    return this.timeoutOperation(async(cache) => {
-      const response = await cache.match('/' + fileName);
-
-      if(!response || !cache) {
+    return this.get(fileName).then((response) => {
+      if(!response) {
        //console.warn('getFile:', response, fileName);
        throw 'NO_ENTRY_FOUND';
      }
-
+
      const promise = response[method]();
      // promise.then(() => {
      //   console.timeEnd(str);
@@ -106,7 +74,26 @@
     });
   }
 
-  private timeoutOperation(callback: (cache: Cache) => Promise<any>) {
+  public saveFile(fileName: string, blob: Blob | Uint8Array) {
+    //return Promise.resolve(blobConstruct([blob]));
+    if(!(blob instanceof Blob)) {
+      blob = blobConstruct(blob) as Blob;
+    }
+
+    const response = new Response(blob, {
+      headers: {
+        'Content-Length': '' + blob.size
+      }
+    });
+
+    return this.save(fileName, response).then(() => blob as Blob);
+  }
+
+  public timeoutOperation(callback: (cache: Cache) => Promise<any>) {
+    if(!this.useStorage) {
+      return Promise.reject('STORAGE_OFFLINE');
+    }
+
     return new Promise(async(resolve, reject) => {
       let rejected = false;
       const timeout = setTimeout(() => {
@@ -118,6 +105,8 @@
       try {
         const cache = await this.openDatabase();
         if(!cache) {
+          this.useStorage = false;
+          this.openDbPromise = undefined;
           throw 'no cache?';
         }
 
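[Review note, not part of the patch] With the useStorage check centralized in timeoutOperation(), every public method of CacheStorageController (get, save, getFile, saveFile, delete) now rejects with 'STORAGE_OFFLINE' once storage has been toggled off, instead of each method checking the flag itself. A rough usage sketch under that assumption; the variable and entry name below are illustrative:

const assets = new CacheStorageController('cachedAssets');

assets.saveFile('example-entry', new Blob([new Uint8Array([1, 2, 3])]))
  .then(() => assets.getFile('example-entry', 'blob'))
  .then((blob: Blob) => console.log('cached bytes:', blob.size))
  .catch((err) => {
    // 'STORAGE_OFFLINE' after toggleStorage(false), 'NO_ENTRY_FOUND' on a cache miss
    console.warn(err);
  });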
diff --git a/src/lib/filemanager.ts b/src/lib/filemanager.ts
index 4cad8620..547edd49 100644
--- a/src/lib/filemanager.ts
+++ b/src/lib/filemanager.ts
@@ -9,7 +9,7 @@
  * https://github.com/zhukov/webogram/blob/master/LICENSE
  */
 
-import { blobConstruct } from "../helpers/blob";
+import { blobConstruct, readBlobAsUint8Array } from "../helpers/blob";
 
 export class FileManager {
   public blobSupported = true;
@@ -28,17 +28,8 @@ export class FileManager {
 
   public write(fileWriter: ReturnType, bytes: Uint8Array | Blob | string): Promise {
     if(bytes instanceof Blob) { // is file bytes
-      return new Promise((resolve, reject) => {
-        let fileReader = new FileReader();
-        fileReader.onload = function(event) {
-          let arrayBuffer = event.target.result as ArrayBuffer;
-
-          let arr = new Uint8Array(arrayBuffer);
-
-          fileWriter.write(arr).then(resolve, reject);
-        };
-
-        fileReader.readAsArrayBuffer(bytes);
+      return readBlobAsUint8Array(bytes).then(arr => {
+        return fileWriter.write(arr);
       });
     } else {
       return fileWriter.write(bytes);
diff --git a/src/lib/mtproto/apiFileManager.ts b/src/lib/mtproto/apiFileManager.ts
index 477213f8..027c53c1 100644
--- a/src/lib/mtproto/apiFileManager.ts
+++ b/src/lib/mtproto/apiFileManager.ts
@@ -11,6 +11,7 @@
 
 import { MOUNT_CLASS_TO } from "../../config/debug";
 import Modes from "../../config/modes";
+import { readBlobAsArrayBuffer } from "../../helpers/blob";
 import { CancellablePromise, deferredPromise } from "../../helpers/cancellablePromise";
 import { notifyAll, notifySomeone } from "../../helpers/context";
 import { getFileNameByLocation } from "../../helpers/fileName";
@@ -489,73 +490,59 @@ export class ApiFileManager {
     for(let offset = 0; offset < fileSize; offset += partSize) {
       const part = _part++; // 0, 1
       yield self.downloadRequest('upload', id, () => {
-        return new Promise((uploadResolve, uploadReject) => {
-          const reader = new FileReader();
-          const blob = file.slice(offset, offset + partSize);
-
-          reader.onloadend = (e) => {
-            if(canceled) {
-              uploadReject({type: 'UPLOAD_CANCELED'});
-              return;
-            }
-
-            if(e.target.readyState !== FileReader.DONE) {
-              self.log.error('wrong readyState!');
-              uploadReject({type: 'WRONG_READY_STATE'});
-              return;
-            }
-
-            let buffer = e.target.result as ArrayBuffer;
-            self.debug && self.log('Upload file part, isBig:', isBigFile, part, buffer.byteLength, new Uint8Array(buffer).length, new Uint8Array(buffer).slice().length);
-
-            /* const u = new Uint8Array(buffer.byteLength);
-            for(let i = 0; i < u.length; ++i) {
-              //u[i] = Math.random() * 255 | 0;
-              u[i] = 0;
-            }
-            buffer = u.buffer; */
+        const blob = file.slice(offset, offset + partSize);
+
+        return readBlobAsArrayBuffer(blob).then(buffer => {
+          if(canceled) {
+            throw {type: 'UPLOAD_CANCELED'};
+          }
+
+          self.debug && self.log('Upload file part, isBig:', isBigFile, part, buffer.byteLength, new Uint8Array(buffer).length, new Uint8Array(buffer).slice().length);
+
+          /* const u = new Uint8Array(buffer.byteLength);
+          for(let i = 0; i < u.length; ++i) {
+            //u[i] = Math.random() * 255 | 0;
+            u[i] = 0;
+          }
+          buffer = u.buffer; */
 
-            /* setTimeout(() => {
-              doneParts++;
-              uploadResolve();
+          /* setTimeout(() => {
+            doneParts++;
+            uploadResolve();
 
-              //////this.log('Progress', doneParts * partSize / fileSize);
+            //////this.log('Progress', doneParts * partSize / fileSize);
 
-              self.log('done part', part, doneParts);
+            self.log('done part', part, doneParts);
 
-              deferred.notify({done: doneParts * partSize, total: fileSize});
+            deferred.notify({done: doneParts * partSize, total: fileSize});
 
-              if(doneParts >= totalParts) {
-                deferred.resolve(resultInputFile);
-                resolved = true;
-              }
-            }, 1250);
-            return; */
-
-            apiManager.invokeApi(method, {
-              file_id: fileId,
-              file_part: part,
-              file_total_parts: totalParts,
-              bytes: buffer/* new Uint8Array(buffer) */
-            } as any, {
-              //startMaxLength: partSize + 256,
-              fileUpload: true
-            }).then((result) => {
-              doneParts++;
-              uploadResolve();
+            if(doneParts >= totalParts) {
+              deferred.resolve(resultInputFile);
+              resolved = true;
+            }
+          }, 1250);
+          return; */
+
+          return apiManager.invokeApi(method, {
+            file_id: fileId,
+            file_part: part,
+            file_total_parts: totalParts,
+            bytes: buffer/* new Uint8Array(buffer) */
+          } as any, {
+            //startMaxLength: partSize + 256,
+            fileUpload: true
+          }).then((result) => {
+            doneParts++;
 
-              //////this.log('Progress', doneParts * partSize / fileSize);
+            //////this.log('Progress', doneParts * partSize / fileSize);
 
-              deferred.notify({done: doneParts * partSize, total: fileSize});
+            deferred.notify({done: doneParts * partSize, total: fileSize});
 
-              if(doneParts >= totalParts) {
-                deferred.resolve(resultInputFile);
-                resolved = true;
-              }
-            }, errorHandler);
-          };
-
-          reader.readAsArrayBuffer(blob);
+            if(doneParts >= totalParts) {
+              deferred.resolve(resultInputFile);
+              resolved = true;
+            }
+          }, errorHandler);
         });
       }, activeDelta).catch(errorHandler);
     }
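[Review note, not part of the patch] The upload hunk above replaces the FileReader/onloadend callback with readBlobAsArrayBuffer(), so each part becomes plain promise chaining: slice the file, read the slice, hand the buffer to apiManager.invokeApi(). A stripped-down sketch of just the slicing/reading step; iterateFileParts is hypothetical, only readBlobAsArrayBuffer comes from src/helpers/blob:

async function* iterateFileParts(file: Blob, partSize: number) {
  for(let offset = 0, part = 0; offset < file.size; offset += partSize, ++part) {
    // Same math as the loop above: one partSize-sized slice per iteration.
    const blob = file.slice(offset, offset + partSize);
    yield {part, buffer: await readBlobAsArrayBuffer(blob)};
  }
}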
diff --git a/src/lib/mtproto/mtproto.worker.ts b/src/lib/mtproto/mtproto.worker.ts
index 2ec31a45..9fa5f85d 100644
--- a/src/lib/mtproto/mtproto.worker.ts
+++ b/src/lib/mtproto/mtproto.worker.ts
@@ -20,6 +20,7 @@ import sessionStorage from '../sessionStorage';
 import { LocalStorageProxyTask } from '../localStorage';
 import { WebpConvertTask } from '../webp/webpWorkerController';
 import { socketsProxied } from './transports/socketProxied';
+import { ToggleStorageTask } from './mtprotoworker';
 
 let webpSupported = false;
 export const isWebpSupported = () => {
@@ -95,6 +96,12 @@ const taskListeners = {
   forceReconnect: () => {
     networkerFactory.forceReconnect();
   },
+
+  toggleStorage: (task: ToggleStorageTask) => {
+    const enabled = task.payload;
+    // AppStorage.toggleStorage(enabled);
+    CacheStorageController.toggleStorage(enabled);
+  }
 };
 
 const onMessage = async(e: any) => {
@@ -153,13 +160,6 @@ const onMessage = async(e: any) => {
         break;
       }
 
-      case 'toggleStorage': {
-        const enabled = task.args[0];
-        // AppStorage.toggleStorage(enabled);
-        CacheStorageController.toggleStorage(enabled);
-        break;
-      }
-
      case 'setLanguage':
      case 'startAll':
      case 'stopAll': {
diff --git a/src/lib/mtproto/mtprotoworker.ts b/src/lib/mtproto/mtprotoworker.ts
index aa700971..829ca828 100644
--- a/src/lib/mtproto/mtprotoworker.ts
+++ b/src/lib/mtproto/mtprotoworker.ts
@@ -6,7 +6,7 @@
 
 import type { LocalStorageProxyTask, LocalStorageProxyTaskResponse } from '../localStorage';
 //import type { LocalStorageProxyDeleteTask, LocalStorageProxySetTask } from '../storage';
-import type { InvokeApiOptions } from '../../types';
+import type { InvokeApiOptions, WorkerTaskVoidTemplate } from '../../types';
 import type { MethodDeclMap } from '../../layer';
 import MTProtoWorker from 'worker-loader!./mtproto.worker';
 //import './mtproto.worker';
@@ -29,6 +29,7 @@ import appRuntimeManager from '../appManagers/appRuntimeManager';
 import { SocketProxyTask } from './transports/socketProxied';
 import telegramMeWebManager from './telegramMeWebManager';
 import { pause } from '../../helpers/schedulers';
+import { CacheStorageDbName } from '../cacheStorage';
 
 type Task = {
   taskId: number,
@@ -45,9 +46,15 @@ type HashOptions = {
   [queryJSON: string]: HashResult
 };
 
+export interface ToggleStorageTask extends WorkerTaskVoidTemplate {
+  type: 'toggleStorage',
+  payload: boolean
+};
+
 export class ApiManagerProxy extends CryptoWorkerMethods {
   public worker: /* Window */Worker;
   public postMessage: (...args: any[]) => void;
+  public postSWMessage: (...args: any[]) => void = () => {};
 
   private afterMessageIdTemp = 0;
   private taskId = 0;
@@ -101,6 +108,7 @@ export class ApiManagerProxy extends CryptoWorkerMethods {
     this.registerServiceWorker();
 
     this.addTaskListener('clear', () => {
+      const toClear: CacheStorageDbName[] = ['cachedFiles', 'cachedStreamChunks'];
       Promise.all([
         AppStorage.toggleStorage(false),
         sessionStorage.clear(),
@@ -108,7 +116,8 @@
           telegramMeWebManager.setAuthorized(false),
           pause(3000)
         ]),
-        webPushApiManager.forceUnsubscribe()
+        webPushApiManager.forceUnsubscribe(),
+        Promise.all(toClear.map(cacheName => caches.delete(cacheName)))
       ]).finally(() => {
         appRuntimeManager.reload();
       });
@@ -220,6 +229,8 @@
         this.log('SW statechange', e);
       });
 
+      this.postSWMessage = worker.controller.postMessage.bind(worker.controller);
+
       /// #if MTPROTO_SW
       const controller = worker.controller || registration.installing || registration.waiting || registration.active;
       this.onWorkerFirstMessage(controller);
@@ -539,7 +550,9 @@
   }
 
   public toggleStorage(enabled: boolean) {
-    return this.performTaskWorkerVoid('toggleStorage', enabled);
+    const task: ToggleStorageTask = {type: 'toggleStorage', payload: enabled};
+    this.postMessage(task);
+    this.postSWMessage(task);
   }
 
   public stopAll() {
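[Review note, not part of the patch] toggleStorage() is no longer routed through performTaskWorkerVoid; it is now a plain ToggleStorageTask posted to both the MTProto worker and the service worker, each of which maps it to CacheStorageController.toggleStorage() in its taskListeners. A sketch of the message flow, assuming apiManagerProxy is the exported ApiManagerProxy instance:

const task: ToggleStorageTask = {type: 'toggleStorage', payload: false};
apiManagerProxy.postMessage(task);   // mtproto.worker.ts -> taskListeners.toggleStorage
apiManagerProxy.postSWMessage(task); // serviceWorker/index.service.ts -> taskListeners.toggleStorage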
diff --git a/src/lib/serviceWorker/index.service.ts b/src/lib/serviceWorker/index.service.ts
index 4825d27d..7bb31f58 100644
--- a/src/lib/serviceWorker/index.service.ts
+++ b/src/lib/serviceWorker/index.service.ts
@@ -8,26 +8,28 @@
 import '../mtproto/mtproto.worker';
 /// #endif
 
 //import CacheStorageController from '../cacheStorage';
-import type { WorkerTaskTemplate, WorkerTaskVoidTemplate } from '../../types';
+import type { Modify, WorkerTaskTemplate, WorkerTaskVoidTemplate } from '../../types';
 import type { InputFileLocation, UploadFile } from '../../layer';
 import type { WebPushApiManager } from '../mtproto/webPushApiManager';
 import type { PushNotificationObject } from './push';
+import type { ToggleStorageTask } from '../mtproto/mtprotoworker';
 import { logger, LogTypes } from '../logger';
 import { CancellablePromise } from '../../helpers/cancellablePromise';
 import { CACHE_ASSETS_NAME, requestCache } from './cache';
 import onStreamFetch from './stream';
 import { closeAllNotifications, onPing } from './push';
+import CacheStorageController from '../cacheStorage';
 
 export const log = logger('SW', LogTypes.Error | LogTypes.Debug | LogTypes.Log | LogTypes.Warn);
 const ctx = self as any as ServiceWorkerGlobalScope;
 
-export const deferredPromises: {[taskId: number]: CancellablePromise<any>} = {};
+export const deferredPromises: {[taskId: string]: CancellablePromise<any>} = {};
 
-export interface RequestFilePartTask extends WorkerTaskTemplate {
+export interface RequestFilePartTask extends Modify<WorkerTaskTemplate, {id: string}> {
   type: 'requestFilePart',
   payload: [number, InputFileLocation, number, number]
 };
 
-export interface RequestFilePartTaskResponse extends WorkerTaskTemplate {
+export interface RequestFilePartTaskResponse extends Modify<WorkerTaskTemplate, {id: string}> {
   type: 'requestFilePart',
   payload?: UploadFile.uploadFile,
   originalPayload?: RequestFilePartTask['payload']
@@ -55,7 +57,7 @@ export interface ServiceWorkerPushClickTask extends WorkerTaskVoidTemplate {
   payload: PushNotificationObject
 };
 
-export type ServiceWorkerTask = RequestFilePartTaskResponse | ServiceWorkerPingTask | ServiceWorkerNotificationsClearTask;
+export type ServiceWorkerTask = RequestFilePartTaskResponse | ServiceWorkerPingTask | ServiceWorkerNotificationsClearTask | ToggleStorageTask;
 
 /// #if !MTPROTO_SW
 const taskListeners: {
@@ -77,6 +79,9 @@ const taskListeners: {
     }
 
     delete deferredPromises[task.id];
+  },
+  toggleStorage: (task: ToggleStorageTask) => {
+    CacheStorageController.toggleStorage(task.payload);
   }
 };
 
 ctx.addEventListener('message', (e) => {
@@ -89,7 +94,7 @@ ctx.addEventListener('message', (e) => {
 /// #endif
 
 //const cacheStorage = new CacheStorageController('cachedAssets');
-let taskId = 0;
+/* let taskId = 0;
 
 export function getTaskId() {
   return taskId;
@@ -97,7 +102,7 @@ export function getTaskId() {
 
 export function incrementTaskId() {
   return taskId++;
-}
+} */
 
 const onFetch = (event: FetchEvent): void => {
   if(event.request.url.indexOf(location.origin + '/') === 0 && event.request.url.match(/\.(js|css|jpe?g|json|wasm|png|mp3|svg|tgs|ico|woff2?|ttf|webmanifest?)(?:\?.*)?$/)) {
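[Review note, not part of the patch] deferredPromises is now keyed by string because stream.ts (next file) uses the JSON-serialized task itself as the id, so two requests for the same file part collapse onto one pending promise. Illustrative helper, not in the patch, showing how such a key is derived from the RequestFilePartTask payload:

function requestFilePartTaskId(dcId: number, location: InputFileLocation, alignedOffset: number, limit: number) {
  // Identical requests serialize identically => same key => same deferred promise.
  return JSON.stringify({type: 'requestFilePart', payload: [dcId, location, alignedOffset, limit]});
}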
diff --git a/src/lib/serviceWorker/stream.ts b/src/lib/serviceWorker/stream.ts
index 60c83dd4..56e5be08 100644
--- a/src/lib/serviceWorker/stream.ts
+++ b/src/lib/serviceWorker/stream.ts
@@ -4,15 +4,52 @@
  * https://github.com/morethanwords/tweb/blob/master/LICENSE
  */
 
-import { deferredPromise } from "../../helpers/cancellablePromise";
+import { readBlobAsUint8Array } from "../../helpers/blob";
+import { CancellablePromise, deferredPromise } from "../../helpers/cancellablePromise";
 import { notifySomeone } from "../../helpers/context";
 import debounce from "../../helpers/schedulers/debounce";
 import { isSafari } from "../../helpers/userAgent";
 import { InputFileLocation, UploadFile } from "../../layer";
+import CacheStorageController from "../cacheStorage";
 import { DownloadOptions } from "../mtproto/apiFileManager";
-import { RequestFilePartTask, deferredPromises, incrementTaskId } from "./index.service";
+import { RequestFilePartTask, deferredPromises, log } from "./index.service";
 import timeout from "./timeout";
 
+const cacheStorage = new CacheStorageController('cachedStreamChunks');
+const CHUNK_TTL = 86400;
+const CHUNK_CACHED_TIME_HEADER = 'Time-Cached';
+
+const clearOldChunks = () => {
+  return cacheStorage.timeoutOperation((cache) => {
+    return cache.keys().then(requests => {
+      const filtered: Map<string, Request> = new Map();
+      const timestamp = Date.now() / 1000 | 0;
+      for(const request of requests) {
+        const match = request.url.match(/\/(\d+?)\?/);
+        if(match && !filtered.has(match[1])) {
+          filtered.set(match[1], request);
+        }
+      }
+
+      const promises: Promise<any>[] = [];
+      for(const [id, request] of filtered) {
+        const promise = cache.match(request).then((response) => {
+          if((+response.headers.get(CHUNK_CACHED_TIME_HEADER) + CHUNK_TTL) <= timestamp) {
+            log('will delete stream chunk:', id);
+            return cache.delete(request, {ignoreSearch: true, ignoreVary: true});
+          }
+        });
+
+        promises.push(promise);
+      }
+
+      return Promise.all(promises);
+    });
+  });
+};
+
+setInterval(clearOldChunks, 1800e3);
+
 type StreamRange = [number, number];
 type StreamId = string;
 const streams: Map<StreamId, Stream> = new Map();
@@ -20,6 +57,7 @@ class Stream {
   private destroyDebounced: () => void;
   private id: StreamId;
   private limitPart: number;
+  private loadedOffsets: Set<number> = new Set();
 
   constructor(private info: DownloadOptions) {
     this.id = Stream.getId(info);
@@ -27,24 +65,85 @@
     // ! если грузить очень большое видео чанками по 512Кб в мобильном Safari, то стрим не запустится
     this.limitPart = info.size > (75 * 1024 * 1024) ? STREAM_CHUNK_UPPER_LIMIT : STREAM_CHUNK_MIDDLE_LIMIT;
-    this.destroyDebounced = debounce(this.destroy, 15000, false, true);
+    this.destroyDebounced = debounce(this.destroy, 150000, false, true);
   }
 
   private destroy = () => {
     streams.delete(this.id);
   };
 
-  private requestFilePart(alignedOffset: number, limit: number) {
-    const task: RequestFilePartTask = {
+  private requestFilePartFromWorker(alignedOffset: number, limit: number, fromPreload = false) {
+    const task: Omit<RequestFilePartTask, 'id'> = {
       type: 'requestFilePart',
-      id: incrementTaskId(),
       payload: [this.info.dcId, this.info.location, alignedOffset, limit]
     };
 
+    const taskId = JSON.stringify(task);
+    (task as RequestFilePartTask).id = taskId;
+
+    let deferred = deferredPromises[taskId] as CancellablePromise<UploadFile.uploadFile>;
+    if(deferred) {
+      return deferred.then(uploadFile => uploadFile.bytes);
+    }
+
     notifySomeone(task);
 
-    const deferred = deferredPromises[task.id] = deferredPromise();
-    return deferred;
+    this.loadedOffsets.add(alignedOffset);
+
+    deferred = deferredPromises[taskId] = deferredPromise<UploadFile.uploadFile>();
+    const bytesPromise = deferred.then(uploadFile => uploadFile.bytes);
+
+    this.saveChunkToCache(bytesPromise, alignedOffset, limit);
+    !fromPreload && this.preloadChunks(alignedOffset, alignedOffset + (this.limitPart * 15));
+
+    return bytesPromise;
+  }
+
+  private requestFilePartFromCache(alignedOffset: number, limit: number, fromPreload?: boolean) {
+    const key = this.getChunkKey(alignedOffset, limit);
+    return cacheStorage.getFile(key).then((blob: Blob) => {
+      return fromPreload ? new Uint8Array() : readBlobAsUint8Array(blob);
+    }, (error) => {
+      if(error === 'NO_ENTRY_FOUND') {
+        return;
+      }
+    });
+  }
+
+  private requestFilePart(alignedOffset: number, limit: number, fromPreload?: boolean) {
+    return this.requestFilePartFromCache(alignedOffset, limit, fromPreload).then(bytes => {
+      return bytes || this.requestFilePartFromWorker(alignedOffset, limit, fromPreload);
+    });
+  }
+
+  private saveChunkToCache(deferred: Promise<Uint8Array>, alignedOffset: number, limit: number) {
+    return deferred.then(bytes => {
+      const key = this.getChunkKey(alignedOffset, limit);
+      const response = new Response(bytes, {
+        headers: {
+          'Content-Length': '' + bytes.length,
+          'Content-Type': 'application/octet-stream',
+          [CHUNK_CACHED_TIME_HEADER]: '' + (Date.now() / 1000 | 0)
+        }
+      });
+
+      return cacheStorage.save(key, response);
+    });
+  }
+
+  private preloadChunks(offset: number, end: number) {
+    if(end > this.info.size) {
+      end = this.info.size;
+    }
+
+    for(; offset < end; offset += this.limitPart) {
+      if(this.loadedOffsets.has(offset)) {
+        continue;
+      }
+
+      this.loadedOffsets.add(offset);
+      this.requestFilePart(offset, this.limitPart, true);
+    }
   }
 
   public requestRange(range: StreamRange) {
@@ -66,9 +165,7 @@
     const limit = end && end < this.limitPart ? alignLimit(end - offset + 1) : this.limitPart;
     const alignedOffset = alignOffset(offset, limit);
 
-    return this.requestFilePart(alignedOffset, limit).then(result => {
-      let ab = result.bytes as Uint8Array;
-
+    return this.requestFilePart(alignedOffset, limit).then(ab => {
       //log.debug('[stream] requestFilePart result:', result);
 
       const headers: Record<string, string> = {
@@ -96,11 +193,15 @@
     });
   }
 
+  private getChunkKey(alignedOffset: number, limit: number) {
+    return this.id + '?offset=' + alignedOffset + '&limit=' + limit;
+  }
+
   public static get(info: DownloadOptions) {
     return streams.get(this.getId(info)) ?? new Stream(info);
   }
 
-  public static getId(info: DownloadOptions) {
+  private static getId(info: DownloadOptions) {
     return (info.location as InputFileLocation.inputDocumentFileLocation).id;
   }
 }
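[Review note, not part of the patch] Each stream chunk is cached under '<documentId>?offset=N&limit=M' together with a Time-Cached header, and clearOldChunks() (scheduled every 1800e3 ms = 30 minutes) evicts entries older than CHUNK_TTL = 86400 s. The staleness test it applies, restated as a standalone sketch; isChunkStale is illustrative:

const CHUNK_TTL = 86400; // seconds, as in stream.ts
const CHUNK_CACHED_TIME_HEADER = 'Time-Cached';

function isChunkStale(response: Response, nowSeconds = Date.now() / 1000 | 0) {
  // A chunk written more than CHUNK_TTL seconds ago is eligible for deletion.
  return (+response.headers.get(CHUNK_CACHED_TIME_HEADER) + CHUNK_TTL) <= nowSeconds;
}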