Merge pull request #1153 from nextcloud-libraries/fix/do-not-read-chunks-into-memory

fix(upload): Do not read chunks into memory but just stream file chunks
susnux authored Apr 15, 2024
2 parents b761611 + fcf0b57 commit 221903f
Showing 3 changed files with 9 additions and 21 deletions.
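For context on why this works: `File.prototype.slice()` reads no bytes at all, it only returns a `Blob` that references a byte range of the on-disk file, and the browser streams those bytes when the blob is consumed as a request body. A minimal sketch of the difference, not taken from this repository (the input selector and upload URL are illustrative):

```ts
// Assume the file comes from a file input on the page.
const input = document.querySelector('input[type="file"]') as HTMLInputElement
const file = input.files![0]

// Old approach (conceptually): FileReader copies the chunk into an
// ArrayBuffer, i.e. the whole chunk ends up in JS memory.

// New approach: slice() is pure bookkeeping for the byte range;
// no data is read or copied here.
const chunk: Blob = file.slice(0, 10 * 1024 * 1024)

// The bytes are only read (streamed) once the blob is actually sent:
await fetch('/upload/chunk-0', { method: 'PUT', body: chunk })
```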
26 changes: 7 additions & 19 deletions lib/utils/upload.ts
@@ -2,10 +2,6 @@ import type { AxiosProgressEvent, AxiosResponse } from 'axios'
 import { generateRemoteUrl } from '@nextcloud/router'
 import { getCurrentUser } from '@nextcloud/auth'
 import axios from '@nextcloud/axios'
-import PLimit from 'p-limit'
-
-const readerLimit = PLimit(1)
-const reader = new FileReader()
 
 type UploadData = Blob | (() => Promise<Blob>)

@@ -57,31 +53,23 @@ export const uploadData = async function(
 }
 
 /**
- * Get chunk of the file. Doing this on the fly
- * give us a big performance boost and proper
- * garbage collection
+ * Get chunk of the file.
+ * Doing this on the fly give us a big performance boost and proper garbage collection
  * @param file File to upload
  * @param start Offset to start upload
  * @param length Size of chunk to upload
  */
 export const getChunk = function(file: File, start: number, length: number): Promise<Blob> {
 	if (start === 0 && file.size <= length) {
 		return Promise.resolve(new Blob([file], { type: file.type || 'application/octet-stream' }))
 	}
 
-	// Since we use a global FileReader, we need to only read one chunk at a time
-	return readerLimit(() => new Promise((resolve, reject) => {
-		reader.onload = () => {
-			if (reader.result !== null) {
-				resolve(new Blob([reader.result], {
-					type: 'application/octet-stream',
-				}))
-			}
-			reject(new Error('Error while reading the file'))
-		}
-		reader.readAsArrayBuffer(file.slice(start, start + length))
-	}))
+	return Promise.resolve(new Blob([file.slice(start, start + length)], { type: 'application/octet-stream' }))
 }
 
 /**
  * Create a temporary upload workspace to upload the chunks to
  * @param destinationFile The file name after finishing the chunked upload
  */
 export const initChunkWorkspace = async function(destinationFile: string | undefined = undefined): Promise<string> {
 	const chunksWorkspace = generateRemoteUrl(`dav/uploads/${getCurrentUser()?.uid}`)
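With the new implementation, `getChunk` resolves immediately and the chunk is only materialized while the request streams. A hedged usage sketch of a chunked-upload loop (`workspaceUrl`, the per-chunk naming, the import path, and the 10 MiB chunk size are illustrative, not taken from this library):

```ts
import axios from '@nextcloud/axios'
import { getChunk } from './lib/utils/upload' // import path illustrative

const chunkSize = 10 * 1024 * 1024 // 10 MiB, illustrative

async function uploadInChunks(file: File, workspaceUrl: string): Promise<void> {
    for (let start = 0; start < file.size; start += chunkSize) {
        // Resolves without reading the file: the blob merely references the range.
        const chunk = await getChunk(file, start, chunkSize)
        await axios.put(`${workspaceUrl}/${start}`, chunk, {
            headers: { 'Content-Type': 'application/octet-stream' },
        })
    }
}
```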
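For reference, the workspace created by `initChunkWorkspace` is a temporary WebDAV collection under `dav/uploads/{uid}`: chunks are uploaded into it, and the server assembles them when the upload is finalized. A rough sketch of that surrounding flow, assuming Nextcloud's documented chunked-upload endpoints (the exact paths and helper name here are illustrative; the diff only shows the workspace URL prefix):

```ts
import axios from '@nextcloud/axios'

async function chunkedUploadFlow(workspaceUrl: string, destinationUrl: string): Promise<void> {
    // 1. Create the temporary workspace collection (initChunkWorkspace's job).
    await axios.request({ method: 'MKCOL', url: workspaceUrl })

    // 2. PUT each chunk into the workspace (see the loop sketched above).

    // 3. Ask the server to assemble all chunks into the destination file.
    await axios.request({
        method: 'MOVE',
        url: `${workspaceUrl}/.file`,
        headers: { Destination: destinationUrl },
    })
}
```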
3 changes: 2 additions & 1 deletion package-lock.json

Some generated files are not rendered by default.

1 change: 0 additions & 1 deletion package.json
@@ -82,7 +82,6 @@
   "buffer": "^6.0.3",
   "crypto-browserify": "^3.12.0",
   "p-cancelable": "^4.0.1",
-  "p-limit": "^5.0.0",
   "p-queue": "^8.0.0",
   "simple-eta": "^3.0.2"
 },
