
Commit e965149

Refactor gzip handling in CSV loader to use DecompressionStream (#954)
1 parent 19ddf45 commit e965149

File tree

1 file changed (+4 −4)

js/data/loader/csv.js

Lines changed: 4 additions & 4 deletions
@@ -1,5 +1,3 @@
-import 'https://cdnjs.cloudflare.com/ajax/libs/pako/2.1.0/pako.min.js'
-
 export default class CSV {
 	/**
 	 * @param {Array<Array<*>>} data data
@@ -112,10 +110,12 @@ export default class CSV {
 	static async load(value, config = {}) {
 		if (typeof value === 'string') {
 			const response = await fetch(value)
-			const buf = await response.arrayBuffer()
+			let buf = await response.arrayBuffer()
 			const decoder = new TextDecoder(config.encoding || 'utf-8')
 			if (value.endsWith('.gz')) {
-				return CSV.parse(decoder.decode(pako.ungzip(buf)), config)
+				const ds = new DecompressionStream('gzip')
+				const decompressedStream = new Blob([buf]).stream().pipeThrough(ds)
+				buf = await new Response(decompressedStream).arrayBuffer()
			}
			return CSV.parse(decoder.decode(buf), config)
		} else if (value instanceof File) {
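
For readers unfamiliar with the Streams API, here is a minimal standalone sketch of the decompression path this commit adopts. It assumes a runtime with DecompressionStream (modern browsers, Node 18+); loadGzippedText and the example URL are hypothetical names for illustration, not part of the repository.

// Fetch a possibly gzip-compressed text file and decode it to a string.
// Mirrors the approach in CSV.load above: DecompressionStream consumes a
// ReadableStream, so the buffer is wrapped in a Blob to obtain one, and
// Response collects the decompressed stream back into an ArrayBuffer.
const loadGzippedText = async (url, encoding = 'utf-8') => {
	const response = await fetch(url)
	let buf = await response.arrayBuffer()
	if (url.endsWith('.gz')) {
		const ds = new DecompressionStream('gzip')
		const decompressedStream = new Blob([buf]).stream().pipeThrough(ds)
		buf = await new Response(decompressedStream).arrayBuffer()
	}
	return new TextDecoder(encoding).decode(buf)
}

// Usage (placeholder URL):
// const text = await loadGzippedText('https://example.com/data.csv.gz')

The practical effect of the change is dropping the pako CDN import: gzip decompression now relies on a built-in platform API rather than an external script.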
