mkaufma opened this issue 5 years ago
It would be great if you could upload a PR with a test case demonstrating your scenario.
Thanks!
const fs = require('fs');
const papa = require('papaparse');

let csvStream;
let shouldWriteHeadersRow = true;
let reportHeaders = ['a', 'b'];

function initStream(csvPath) {
    csvStream = fs.createWriteStream(csvPath);
}

async function closeStream() {
    // Note: end() does not return a promise, so this await resolves
    // immediately rather than waiting for the 'finish' event.
    await csvStream.end();
}

// Emit the header row only for the first chunk.
function getParseConfig() {
    if (shouldWriteHeadersRow) {
        shouldWriteHeadersRow = false;
        return { columns: reportHeaders, header: true };
    }
    return { columns: reportHeaders, header: false };
}

async function writeToStream(dataArray) {
    const parseConfig = getParseConfig();
    const csv = papa.unparse(dataArray, parseConfig);
    // Note: write() returns a boolean, not a promise, so this await
    // is also a no-op.
    await csvStream.write(csv);
}

async function runTest() {
    initStream('./tst.csv');
    await writeToStream([{ a: '1', b: '2' }, { a: '11', b: '22' }]);
    await writeToStream([{ a: '111', b: '222' }, { a: '1111', b: '2222' }]);
    await closeStream();
}

runTest().then(() => {
    console.log('write finished');
}).catch(e => {
    console.error(e);
});
The above code creates a faulty CSV file. Instead of:
a,b
1,2
11,22
111,222
1111,2222
we get:
a,b
1,2
11,22111,222
1111,2222
This happens because unparse does not terminate the last row with a newline, so the last row of one chunk and the first row of the next run together. It would be great if you could add a config option to ensure the unparse result always ends with the configured newline.
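In the meantime, a workaround is to append the line ending yourself after each chunk. A minimal sketch reusing the helpers above (writeChunk is just a renamed writeToStream, and '\r\n' assumes PapaParse's default newline):

// Terminate every chunk with a newline so the next chunk starts on a
// fresh line. '\r\n' is PapaParse's default; match whatever you pass
// as the newline config option if you override it.
async function writeChunk(dataArray) {
    const parseConfig = getParseConfig();
    const csv = papa.unparse(dataArray, parseConfig) + '\r\n';
    csvStream.write(csv);
}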
@mkaufma @pokoli I'm also having the same issue. I'm using PapaParse to write to the same file multiple times (e.g. streaming writes).
https://github.com/mholt/PapaParse/blob/a66776140f25a69cb3469cfdfcfc1f669db1c709/papaparse.js#L431
I think data.length - 1 at that line should be changed to data.length. Otherwise, when using PapaParse to write to a stream in chunks, we have to add the newline ourselves.
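If I'm reading the serializer right, that guard appends the newline only between rows, so the final row is never terminated. A simplified sketch of the loop shape (not PapaParse's actual code; serializeRow is a hypothetical stand-in for the per-row logic):

var csv = '';
for (var row = 0; row < data.length; row++) {
    csv += serializeRow(data[row]); // hypothetical per-row serializer
    // With row < data.length - 1, the last row gets no newline;
    // changing it to row < data.length would newline-terminate every
    // row, which is what chunked/streaming writers need.
    if (row < data.length - 1)
        csv += newline;
}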