LineReader

parent 5d050abd09
commit 8e1c5941bb

2 changed files with 98 additions and 28 deletions
webapp/src/archiver.ts

@@ -2,6 +2,7 @@
 // See LICENSE.txt for license information.
 import {ArchiveUtils, IArchiveHeader, IArchiveLine, IBlockArchiveLine} from './blocks/archive'
 import {IBlock, IMutableBlock} from './blocks/block'
+import {LineReader} from './lineReader'
 import mutator from './mutator'
 import {Utils} from './utils'
 import {BoardTree} from './viewModel/boardTree'
@@ -37,46 +38,40 @@ class Archiver {
     }

     private static async readBlocksFromFile(file: File): Promise<IBlock[]> {
-        // TODO: Read input as a stream, line by line
-        const contents = await (new Response(file)).text()
-        Utils.log(`Import ${contents.length} bytes.`)
-
         const blocks: IBlock[] = []
-        const allLineStrings = contents.split('\n')
-        if (allLineStrings.length >= 2) {
-            const headerString = allLineStrings[0]
-            const header = JSON.parse(headerString) as IArchiveHeader
-            if (header.date && header.version >= 1) {
-                const date = new Date(header.date)
-                Utils.log(`Import archive, version: ${header.version}, date/time: ${date.toLocaleString()}, ${blocks.length} block(s).`)
-
-                const lineStrings = allLineStrings.slice(1)
-                for (const lineString of lineStrings) {
-                    if (!lineString) {
-                        // Ignore empty lines, e.g. last line
-                        continue
+        let isFirstLine = true
+        return new Promise<IBlock[]>((resolve) => {
+            LineReader.readFile(file, (line, completed) => {
+                if (completed) {
+                    resolve(blocks)
+                    return
+                }
+
+                if (isFirstLine) {
+                    isFirstLine = false
+                    const header = JSON.parse(line) as IArchiveHeader
+                    if (header.date && header.version >= 1) {
+                        const date = new Date(header.date)
+                        Utils.log(`Import archive, version: ${header.version}, date/time: ${date.toLocaleString()}.`)
                     }

-                    const line = JSON.parse(lineString) as IArchiveLine
-                    if (!line || !line.type || !line.data) {
+                } else {
+                    const row = JSON.parse(line) as IArchiveLine
+                    if (!row || !row.type || !row.data) {
                         Utils.logError('importFullArchive ERROR parsing line')
-                        continue
+                        return
                     }
-                    switch (line.type) {
+                    switch (row.type) {
                     case 'block': {
-                        const blockLine = line as IBlockArchiveLine
+                        const blockLine = row as IBlockArchiveLine
                         const block = blockLine.data
                         blocks.push(block)
                         break
                     }
                     }
                 }
-            } else {
-                Utils.logError('importFullArchive ERROR parsing header')
-            }
-        }
-
-        return blocks
+            })
+        })
     }

     static importFullArchive(onComplete?: () => void): void {
webapp/src/lineReader.ts (new file, 75 lines)

@@ -0,0 +1,75 @@
+// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
+// See LICENSE.txt for license information.
+
+class LineReader {
+    private static appendBuffer(buffer1: ArrayBuffer, buffer2: ArrayBuffer) {
+        const tmp = new Uint8Array(buffer1.byteLength + buffer2.byteLength)
+        tmp.set(new Uint8Array(buffer1), 0)
+        tmp.set(new Uint8Array(buffer2), buffer1.byteLength)
+        return tmp.buffer
+    }
+
+    private static arrayBufferIndexOf(buffer: ArrayBuffer, charCode: number): number {
+        const view = new Uint8Array(buffer)
+        for (let i = 0; i < view.byteLength; ++i) {
+            if (view[i] === charCode) {
+                return i
+            }
+        }
+
+        return -1
+    }
+
+    static readFile(file: File, callback: (line: string, completed: boolean) => void): void {
+        let buffer = new ArrayBuffer(0)
+
+        const chunkSize = 1024 * 1000
+        let offset = 0
+        const fr = new FileReader()
+        const decoder = new TextDecoder()
+
+        fr.onload = () => {
+            const chunk = fr.result as ArrayBuffer
+            buffer = LineReader.appendBuffer(buffer, chunk)
+
+            const newlineChar = 10 // '\n'
+            let newlineIndex = LineReader.arrayBufferIndexOf(buffer, newlineChar)
+            while (newlineIndex >= 0) {
+                const result = decoder.decode(buffer.slice(0, newlineIndex))
+                buffer = buffer.slice(newlineIndex + 1)
+                callback(result, false)
+                newlineIndex = LineReader.arrayBufferIndexOf(buffer, newlineChar)
+            }
+
+            offset += chunkSize
+            if (offset >= file.size) {
+                // Completed
+
+                if (buffer.byteLength > 0) {
+                    // Handle last line
+                    callback(decoder.decode(buffer), false)
+                }
+
+                callback('', true)
+                return
+            }
+
+            seek()
+        }
+
+        fr.onerror = () => {
+            callback('', true)
+        }
+
+        seek()
+
+        function seek() {
+            const slice = file.slice(offset, offset + chunkSize)
+
+            // Need to read as an ArrayBuffer (instead of text) to handle unicode boundaries
+            fr.readAsArrayBuffer(slice)
+        }
+    }
+}
+
+export {LineReader}
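A hedged usage sketch of the new class, assuming a File obtained from a browser file input; the element lookup and logging are illustrative, not part of this commit:

import {LineReader} from './lineReader'

// Illustration only: stream a user-selected file line by line.
const input = document.querySelector('input[type=file]') as HTMLInputElement
input.onchange = () => {
    const file = input.files?.[0]
    if (!file) {
        return
    }
    LineReader.readFile(file, (line, completed) => {
        if (completed) {
            console.log('finished reading file')
            return
        }
        console.log(`line: ${line}`)
    })
}

Note that, per readFile above, a trailing line with no final newline is still delivered with completed set to false, followed by one last callback with an empty string and completed set to true.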