Reading CSV Files into Objects with Node.js

Full source code available here.

As I am learning Node.js I am constantly surprised by how easy it is to do some things, how difficult it is to do others, and how poor many of the examples out there are.

Case in point, I want to read some data from a CSV file and then send it off to Elasticsearch for indexing. The Elasticsearch part I have figured out already, and in Node.js that is easy. I expected it to be trivial to find an example of reading a file, creating objects based on the rows, processing the objects, and doing all that in chunks of say 1000 rows. This kind of thing is easy in C# and there are plenty of examples. Not the case in Node.js.

Here are two ways of doing it.

You can download a CSV with all of Shakespeare’s plays, but the example you can download above includes just the first 650 lines of that file.

I want to read the file 100 rows at a time, put the rows into objects and then process the rows.

csv-parse

The first module I came across was csv-parse.

Run this from the console -

npm install csv-parse

I have an Entry class that represents a row from the CSV file. Its constructor takes six parameters, one for each of the columns in the CSV file.

In this example processing the data in chunks is not necessary, but when I process the full file with over 130,000 rows, chunking becomes important.

 1 var fs = require('fs');
 2 var parse = require('csv-parse');
 3
 4
 5 function readCSV() {
 6     let entries = [];
 7     let count = 0;
 8
 9     fs.createReadStream('shakespeare_plays_sample.csv')
10         .pipe(parse({ delimiter: ';', from_line: 2 })) // skip the header row
11         .on('data', function (row) {
12             count++;
13             entries.push(new Entry(row[0], row[1], row[2], row[3], row[4], row[5]));
14
15             if (count % 100 == 0) {
16                 processEntries(entries);
17                 count = 0;
18                 entries = []; // clear the array
19             }
20         })
21         .on('end', function () {
22             processEntries(entries); // process the final chunk
23         });
24 }
25
26 function processEntries(entries) {
27     console.log(entries[0].Id + "  to " + entries[entries.length - 1].Id);
28 }
29
30 class Entry {
31     constructor(id, play, characterLineNumber, actSceneLine, character, line) {
32         this.Id = id;
33         this.Play = play;
34         this.CharacterLineNumber = characterLineNumber;
35         this.ActSceneLine = actSceneLine;
36         this.Character = character;
37         this.Line = line;
38     }
39 }
40
41 readCSV();

Note how I have to make sure that the last chunk is also processed on line 22.
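A note in case this example doesn’t run for you: I wrote it against an older version of csv-parse. In version 5 and later the parser is a named export rather than the module itself, so (as far as I can tell from the module’s documentation) the setup would look something like this, with the rest of the code unchanged -

const fs = require('fs');
const { parse } = require('csv-parse'); // named export in csv-parse v5+

fs.createReadStream('shakespeare_plays_sample.csv')
    .pipe(parse({ delimiter: ';', from_line: 2 })) // skip the header row
    .on('data', function (row) {
        // same row handling as in the example above
    });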

This approach seems fine, but then I found the csvtojson module.

csvtojson

This module makes what I’m trying to do a little easier by skipping over the need to explicitly construct an object with the data from the rows in the file.
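To make that concrete, here is a hypothetical example of what a single parsed row looks like. The property names are an assumption on my part - csvtojson takes them from the file’s header row, which I’m assuming matches the fields of the Entry class from the first example - and the values are placeholders, not a real row from the file.

// Hypothetical shape of a row object produced by csvtojson;
// property names come from the CSV header row, values are placeholders.
const exampleRow = {
    Id: '1',
    Play: 'Henry IV',
    CharacterLineNumber: '1',
    ActSceneLine: '1.1.1',
    Character: 'KING HENRY IV',
    Line: 'So shaken as we are, so wan with care,'
};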

First install the module -

npm install csvtojson

This is the code -

 1 const csv = require('csvtojson');
 2
 3 function readCSV() {
 4     let entries = [];
 5     let count = 0;
 6
 7     csv({ delimiter: ';' })
 8     .fromFile('./shakespeare_plays_sample.csv')
 9     .then((json) => {
10         json.forEach((row) =>
11         {
12             count++;
13             entries.push(row);
14             if (count % 100 == 0) {
15                 processEntries(entries);
16                 count = 0;
17                 entries = []; // clear the array
18             }
19         });
20         processEntries(entries); // process the final chunk
21     });
22 }
23
24 function processEntries(entries) {
25     console.log(entries[0].Id + "  to " + entries[entries.length - 1].Id);
26 }
27
28 readCSV();

Again note how I process the chunks of 100, and then the final chunk on line 20.
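One thing to be aware of with this approach: fromFile returns a promise, so the parsed rows are only available once it resolves, inside the .then callback. Here is a minimal sketch of the same chunked processing using async/await instead, which I find a little easier to read. It assumes the same sample file and the same processEntries function as above.

const csv = require('csvtojson');

// A sketch of the same chunked processing using async/await.
async function readCSV() {
    const json = await csv({ delimiter: ';' }).fromFile('./shakespeare_plays_sample.csv');

    let entries = [];
    for (const row of json) {
        entries.push(row);
        if (entries.length === 100) {
            processEntries(entries);
            entries = []; // start a new chunk
        }
    }

    if (entries.length > 0) {
        processEntries(entries); // process the final, partial chunk
    }
}

function processEntries(entries) {
    console.log(entries[0].Id + "  to " + entries[entries.length - 1].Id);
}

readCSV();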

All of this is in aid of indexing all of Shakespeare’s works in Elasticsearch, which I will show in another post.

Full source code available here.
