Coverage

Overall coverage: 95% (486 executable lines, 464 hit, 22 missed)

csv.js

97% (68 executable lines, 66 hit, 2 missed)

Line | Hits | Source
1// Generated by CoffeeScript 1.3.3
2/*
3
4Node CSV
5========
6
7This project provides CSV parsing and has been tested and used
8on large input files (over 2 GB).
9
10* Follows the Node.js streaming API
11* Async and event-based
12* Supports delimiters, quotes and escape characters
13* Line break discovery: detected in the source and reported to the destination
14* Data transformation
15* Support for large datasets
16* Complete test coverage, as samples and inspiration
17* No external dependencies
18
19Important: this documentation covers the current version of the node
20csv parser. The documentation for the previous version, 0.1.0, is
21available [here](https://github.com/wdavidw/node-csv-parser/tree/v0.1).
22
23Quick example
24-------------
25
26 // node samples/string.js
27 var csv = require('csv');
28 csv()
29 .from( '"1","2","3","4"\n"a","b","c","d"' )
30 .to( console.log )
31 // Output:
32 // 1,2,3,4
33 // a,b,c,d
34
35Advanced example
36----------------
37
38The following example illustrates 4 usages of the library:
391. Plug a readable stream by defining a file path
402. Direct output to a file path
413. Transform the data (optional)
424. Listen to events (optional)
43
44 // node samples/sample.js
45 var fs = require('fs'), csv = require('csv');
46 csv()
47 .from.stream(fs.createReadStream(__dirname+'/sample.in'))
48 .to.path(__dirname+'/sample.out')
49 .transform( function(data){
50 data.unshift(data.pop());
51 return data;
52 })
53 .on('record', function(data,index){
54 console.log('#'+index+' '+JSON.stringify(data));
55 })
56 .on('end', function(count){
57 console.log('Number of lines: '+count);
58 })
59 .on('error', function(error){
60 console.log(error.message);
61 });
62 // Output:
63 // #0 ["2000-01-01","20322051544","1979.0","8.8017226E7","ABC","45"]
64 // #1 ["2050-11-27","28392898392","1974.0","8.8392926E7","DEF","23"]
65 // Number of lines: 2
66
67Pipe example
68------------
69
70The module follows a Stream architecture. At its core, the parser and
71the stringifier utilities provide a [Stream Writer][writable_stream]
72and a [Stream Reader][readable_stream] implementation, both available through the CSV API.
73
74 |-----------| |---------|---------| |---------|
75 | | | | | | |
76 | | | CSV | | |
77 | | | | | | |
78 | Stream | | Writer | Reader | | Stream |
79 | Reader |.pipe(| API | API |).pipe(| Writer |)
80 | | | | | | |
81 | | | | | | |
82 |-----------| |---------|---------| |---------|
83
84Here's a quick example:
85
86 var input = fs.createReadStream('./in');
87 var output = fs.createWriteStream('./out');
88 input.pipe(csv()).pipe(output);
89
90Installing
91----------
92
93Via [npm](http://github.com/isaacs/npm):
94```bash
95npm install csv
96```
97
98Via git (or downloaded tarball):
99```bash
100git clone http://github.com/wdavidw/node-csv-parser.git
101```
102
103Events
104------
105
106The library extends the Node [EventEmitter][event] class and emits all
107the events of the Writable and Readable [Stream API][stream]. Additionally, a
108`record` event is emitted for each parsed row.
109
110* *record*
111 Emitted by the stringifier when a new row is parsed and transformed. The data is
112 the value returned by the user `transform` callback, if any. Note however that the event won't
113 be emitted if the transform callback returns `null`, since the record is skipped.
114 The listener receives two arguments: `data`, the CSV line being processed (an array or an object),
115 and `index`, the index of the line, starting at zero.
116* *data*
117 Emitted by the stringifier on each line once the data has been transformed and stringified.
118* *drain*
 Emitted after `resume()`, once the stream is ready to accept data again following a `pause()`.
119* *end*
120 Emitted when the CSV content has been parsed.
121* *close*
122 Emitted when the underlying resource has been closed. For example, when writing to a file with `csv().to.path()`, the event is emitted once the writing process is complete and the file is closed.
123* *error*
124 Emitted whenever an error occurs.
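
A minimal sketch putting these events together when writing to a file (the
input string and output path are hypothetical):

    var csv = require('csv');
    csv()
      .from('"1","2","3"\n"a","b","c"')
      .to.path('/tmp/out.csv')
      .on('end', function(count){
        // parsing and transformation are done, the file may still be flushing
        console.log('parsed ' + count + ' lines');
      })
      .on('close', function(){
        // the destination file is written and closed
        console.log('file closed');
      })
      .on('error', function(error){
        console.log(error.message);
      });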
125*/
126
1271var CSV, from, options, parser, state, stream, stringifier, to, transformer;
128
1291stream = require('stream');
130
1311state = require('./state');
132
1331options = require('./options');
134
1351from = require('./from');
136
1371to = require('./to');
138
1391stringifier = require('./stringifier');
140
1411parser = require('./parser');
142
1431transformer = require('./transformer');
144
1451CSV = function() {
14678 this.paused = false;
14778 this.readable = true;
14878 this.writable = true;
14978 this.state = state();
15078 this.options = options();
15178 this.from = from(this);
15278 this.to = to(this);
15378 this.parser = parser(this);
15478 this.parser.on('row', (function(row) {
15527450 return this.transformer.transform(row);
156 }).bind(this));
15778 this.parser.on('end', (function() {
15873 return this.transformer.end();
159 }).bind(this));
16078 this.parser.on('error', (function(e) {
1613 return this.error(e);
162 }).bind(this));
16378 this.stringifier = stringifier(this);
16478 this.transformer = transformer(this);
16578 this.transformer.on('end', (function() {
16673 return this.emit('end', this.state.count);
167 }).bind(this));
16878 return this;
169};
170
1711CSV.prototype.__proto__ = stream.prototype;
172
173/*
174
175`pause()`
176---------
177
178Implementation of the Readable Stream API, requesting that no further data
179be sent until resume() is called.
180*/
181
182
1831CSV.prototype.pause = function() {
18419463 return this.paused = true;
185};
186
187/*
188
189`resume()`
190----------
191
192Implementation of the Readable Stream API, resuming the incoming 'data'
193events after a pause().
194*/
195
196
1971CSV.prototype.resume = function() {
19815346 this.paused = false;
19915346 return this.emit('drain');
200};
201
202/*
203
204`write(data, [preserve])`
205-------------------------
206
207Implementation of the Writable Stream API with a larger signature. Data
208may be a string, a buffer, an array or an object.
209
210If data is a string or a buffer, it could span multiple lines. If data
211is an object or an array, it must represent a single line.
212Preserve is for lines which are not considered CSV data.
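
A minimal sketch of feeding data manually (the values are hypothetical):

    var csv = require('csv');
    var writer = csv().to(console.log);
    writer.write('"1","2","3"\n');      // string: may span multiple lines
    writer.write(['a', 'b', 'c']);      // array: exactly one record
    writer.write('# not csv\n', true);  // preserved line, passed through untouched
    writer.end();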
213*/
214
215
2161CSV.prototype.write = function(data, preserve) {
21729461 var csv;
21829461 if (!this.writable) {
2190 return false;
220 }
22129461 if (data instanceof Buffer) {
22226286 data = data.toString();
223 }
22429461 if (typeof data === 'string' && !preserve) {
22526420 this.parser.parse(data);
2263041 } else if (Array.isArray(data) && !this.state.transforming) {
2272020 csv = this;
2282020 this.transformer.transform(data);
229 } else {
2301021 if (preserve || this.state.transforming) {
2319 this.stringifier.write(data, preserve);
232 } else {
2331012 this.transformer.transform(data);
234 }
235 }
23629461 return !this.paused;
237};
238
239/*
240
241`end()`
242-------
243
244Terminate the parsing. Call this method when no more csv data is
245to be parsed. It implements the Stream Writer API by setting the `writable`
246property to `false` and emitting the `end` event.
247*/
248
249
2501CSV.prototype.end = function() {
25175 if (!this.writable) {
2521 return;
253 }
25474 this.readable = false;
25574 this.writable = false;
25674 return this.parser.end();
257};
258
259/*
260
261`transform(callback)`
262---------------------
263
264Register the transformer callback. The callback is a user-provided
265function called on each line to filter, enrich or modify the
266dataset. More information is available in the "transforming data" section.
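
A minimal sketch, assuming a two-column input whose columns we want to swap:

    csv()
      .from('1,2\n3,4')
      .to(console.log)
      .transform(function(data, index){
        return [data[1], data[0]];
      });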
267*/
268
269
2701CSV.prototype.transform = function(callback) {
27131 this.transformer.callback = callback;
27231 return this;
273};
274
275/*
276
277`error(error)`
278--------------
279
280Unified error-handling mechanism: emits the error and marks the
281stream as neither readable nor writable.
282*/
283
284
2851CSV.prototype.error = function(e) {
2865 this.readable = false;
2875 this.writable = false;
2885 this.emit('error', e);
2895 if (this.readStream) {
2900 this.readStream.destroy();
291 }
2925 return this;
293};
294
2951module.exports = function() {
29678 return new CSV;
297};
298
299/*
300[event]: http://nodejs.org/api/events.html
301[stream]: http://nodejs.org/api/stream.html
302[writable_stream]: http://nodejs.org/api/stream.html#stream_writable_stream
303[readable_stream]: http://nodejs.org/api/stream.html#stream_readable_stream
304*/
305

state.js

100% (2 executable lines, 2 hit, 0 missed)

Line | Hits | Source
1// Generated by CoffeeScript 1.3.3
2
31module.exports = function() {
478 return {
5 count: 0,
6 field: '',
7 line: [],
8 lastC: '',
9 countWriten: 0,
10 transforming: 0
11 };
12};

options.js

100% (2 executable lines, 2 hit, 0 missed)

Line | Hits | Source
1// Generated by CoffeeScript 1.3.3
2/*
3Input and output options
4========================
5
6The `options` property provides access to the `from` and `to` objects used to store options. This
7property is for internal usage and should be considered private. It is recommended to use
8`from.options()` and `to.options()` to access those objects.
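
A minimal sketch of the recommended accessors:

    var csv = require('csv');
    var parser = csv();
    parser.from.options({delimiter: ';'}); // update the input options, returns the csv instance
    console.log(parser.to.options());      // read the output options object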
9*/
10
111module.exports = function() {
1278 return {
13 from: {
14 delimiter: ',',
15 quote: '"',
16 escape: '"',
17 columns: null,
18 flags: 'r',
19 encoding: 'utf8',
20 trim: false,
21 ltrim: false,
22 rtrim: false
23 },
24 to: {
25 delimiter: null,
26 quote: null,
27 quoted: false,
28 escape: null,
29 columns: null,
30 header: false,
31 lineBreaks: null,
32 flags: 'w',
33 encoding: 'utf8',
34 newColumns: false,
35 end: true
36 }
37 };
38};

from.js

91% (60 executable lines, 55 hit, 5 missed)

Line | Hits | Source
1// Generated by CoffeeScript 1.3.3
21var Stream, fs, path, utils, _ref;
3
41fs = require('fs');
5
61path = require('path');
7
81if ((_ref = fs.exists) == null) {
90 fs.exists = path.exists;
10}
11
121utils = require('./utils');
13
141Stream = require('stream');
15
16/*
17
18Reading data from a source
19==========================
20
21The `csv().from` property provides functions to read from an external
22source and write to a CSV instance. The source may be a string, a file,
23a buffer or a readable stream.
24
25You may call the `from` function or one of its sub functions. For example,
26here are two identical ways to read from a file:
27
28 csv.from('/tmp/data.csv').on('data', console.log);
29 csv.from.path('/tmp/data.csv').on('data', console.log);
30*/
31
32
331module.exports = function(csv) {
34 /*
35
36 `from(mixed)`
37 -------------
38
39 Read from any sort of source. It should be considered a convenience function which
40 discovers the nature of the data source to parse.
41
42 If it is a string, it checks whether it matches an existing file path and reads the file content;
43 otherwise, it treats the string as CSV data. If it is an instance of Stream, it considers the
44 object to be an input stream. If it is an array, each element should correspond to a record.
45
46 Here are some examples of how to use this function:
47
48 csv()
49 .from('"1","2","3","4"\n"a","b","c","d"')
50 .on('end', function(){ console.log('done') })
51
52 csv()
53 .from('./path/to/file.csv')
54 .on('end', function(){ console.log('done') })
55
56 csv()
57 .from(fs.createReadStream('./path/to/file.csv'))
58 .on('end', function(){ console.log('done') })
59
60 csv()
61 .from(['"1","2","3","4","5"',['1','2','3','4','5']])
62 .on('end', function(){ console.log('done') })
63 */
64
6578 var from;
6678 from = function(mixed, options) {
6713 var error;
6813 error = false;
6913 switch (typeof mixed) {
70 case 'string':
719 fs.exists(mixed, function(exists) {
729 if (exists) {
731 return from.path(mixed, options);
74 } else {
758 return from.string(mixed, options);
76 }
77 });
789 break;
79 case 'object':
804 if (Array.isArray(mixed)) {
813 from.array(mixed, options);
82 } else {
831 if (mixed instanceof Stream) {
841 from.stream(mixed, options);
85 } else {
860 error = true;
87 }
88 }
894 break;
90 default:
910 error = true;
92 }
9313 if (error) {
940 csv.error(new Error("Invalid mixed argument in from"));
95 }
9613 return csv;
97 };
98 /*
99
100 `from.options([options])`
101 -------------------------
102
103 Update and retrieve options relative to the input source. Return
104 the options as an object if no argument is provided.
105
106 * `delimiter` Set the field delimiter, one character only, defaults to a comma.
107 * `quote` Set the quote character, one character only, defaults to a double quote.
108 * `escape` Set the escape character, one character only, defaults to a double quote.
109 * `columns` List of fields, or true to autodiscover them from the first CSV line, defaults to null. Impacts the `transform` argument and the `data` event by providing an object instead of an array; order matters, see the transform and the columns sections for more details.
110 * `flags` Used to read a file stream, defaults to 'r'.
111 * `encoding` Encoding of the read stream, defaults to 'utf8', applied when a readable stream is created.
112 * `trim` If true, ignore whitespace immediately around the delimiter, defaults to false.
113 * `ltrim` If true, ignore whitespace immediately following the delimiter (i.e. left-trim all fields), defaults to false.
114 * `rtrim` If true, ignore whitespace immediately preceding the delimiter (i.e. right-trim all fields), defaults to false.
115
116 Additionally, in case you are working with a stream, you can pass all
117 the options accepted by the `stream.pipe` function.
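
A minimal sketch combining a few of these options (the file path is hypothetical):

    csv()
      .from.path('/tmp/data.csv', {delimiter: ';', columns: true, trim: true})
      .on('record', function(data, index){
        // with `columns: true`, `data` is an object keyed by the header row
        console.log(index, data);
      });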
118 */
119
12078 from.options = function(options) {
121191 if (options != null) {
12224 utils.merge(csv.options.from, options);
12324 return csv;
124 } else {
125167 return csv.options.from;
126 }
127 };
128 /*
129
130 `from.array(data, [options])`
131 ------------------------------
132
133 Read from an array. Take an array as first argument and optionally
134 some options as a second argument. Each element of the array
135 represents a csv record. Those elements may be a string, a buffer, an
136 array or an object.
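
A minimal sketch:

    csv()
      .from.array([['1', '2', '3'], ['a', 'b', 'c']])
      .to(console.log);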
137 */
138
13978 from.array = function(data, options) {
1408 this.options(options);
1418 process.nextTick(function() {
1428 var record, _i, _len;
1438 for (_i = 0, _len = data.length; _i < _len; _i++) {
14417 record = data[_i];
14517 csv.write(record);
146 }
1478 return csv.end();
148 });
1498 return csv;
150 };
151 /*
152
153 `from.string(data, [options])`
154 -------------------------------
155
156 Read from a string or a buffer. Take a string as first argument and
157 optionally an object of options as a second argument. The string
158 must be the complete csv data; look at the streaming alternative if your
159 CSV is large.
160
161 csv()
162 .from( '"1","2","3","4"\n"a","b","c","d"' )
163 .to( function(data){} )
164 */
165
16678 from.string = function(data, options) {
16717 this.options(options);
16817 process.nextTick(function() {
16917 csv.write(data);
17017 return csv.end();
171 });
17217 return csv;
173 };
174 /*
175
176 `from.path(path, [options])`
177 ----------------------------
178
179 Read from a file path. Take a file path as first argument and optionally an object
180 of options as a second argument.
181 */
182
18378 from.path = function(path, options) {
18440 var stream;
18540 this.options(options);
18640 stream = fs.createReadStream(path, csv.from.options());
18740 return csv.from.stream(stream);
188 };
189 /*
190
191 `from.stream(stream, [options])`
192 --------------------------------
193
194 Read from a stream. Take a readable stream as first argument and optionally
195 an object of options as a second argument.
196 */
197
19878 from.stream = function(stream, options) {
19943 if (options) {
2000 this.options(options);
201 }
20243 stream.setEncoding(csv.from.options().encoding);
20343 stream.pipe(csv, csv.from.options());
20443 return csv;
205 };
20678 return from;
207};

utils.js

100% (6 executable lines, 6 hit, 0 missed)

Line | Hits | Source
1// Generated by CoffeeScript 1.3.3
2
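// merge(obj1, obj2) shallow-copies every enumerable key of obj2 onto obj1
// (or onto a new object if obj1 is falsy) and returns the result; it is used
// internally to apply user-provided options over the defaults.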
31module.exports = {
4 merge: function(obj1, obj2) {
594 var key, r;
694 r = obj1 || {};
794 for (key in obj2) {
8618 r[key] = obj2[key];
9 }
1094 return r;
11 }
12};

to.js

86% (67 executable lines, 58 hit, 9 missed)

Line | Hits | Source
1// Generated by CoffeeScript 1.3.3
21var Stream, fs, utils;
3
41fs = require('fs');
5
61Stream = require('stream');
7
81utils = require('./utils');
9
10/*
11
12Writing data to a destination
13=============================
14
15The `csv().to` property provides functions to read from a CSV instance and
16to write to an external destination. The destination may be a stream, a file
17or a callback.
18
19You may call the `to` function or one of its sub functions. For example,
20here are two identical ways to write to a file:
21
22 csv.from(data).to('/tmp/data.csv');
23 csv.from(data).to.path('/tmp/data.csv');
24*/
25
26
271module.exports = function(csv) {
28 /*
29
30 `to(mixed)`
31 -----------
32
33 Write to any sort of destination. It should be considered a convenience function
34 which discovers the nature of the destination where the CSV data is to be written.
35
36 If it is a function, the stringified output will be provided as the first argument
37 of the callback. If it is a string, it is expected to be a
38 file path. If it is an instance of Stream, it considers the object to be an
39 output stream.
40
41 Here are some examples of how to use this function:
42
43 csv()
44 .from('"1","2","3","4","5"')
45 .to(function(data){ console.log(data) })
46
47 csv()
48 .from('"1","2","3","4","5"')
49 .to('./path/to/file.csv')
50
51 csv()
52 .from('"1","2","3","4","5"')
53 .to(fs.createWriteStream('./path/to/file.csv'))
54 */
55
5678 var to;
5778 to = function(mixed, options) {
5813 var error;
5913 error = false;
6013 switch (typeof mixed) {
61 case 'string':
621 to.path(mixed, options);
631 break;
64 case 'object':
650 if (mixed instanceof Stream) {
660 to.stream(mixed, options);
67 } else {
680 error = true;
69 }
700 break;
71 case 'function':
7212 to.string(mixed, options);
7312 break;
74 default:
750 error = true;
76 }
7713 if (error) {
780 csv.error(new Error("Invalid mixed argument in to"));
79 }
8013 return csv;
81 };
82 /*
83
84 `to.options([options])`
85 -----------------------
86
87 Update and retrieve options relative to the output. Return the options
88 as an object if no argument is provided.
89
90 * `delimiter` Set the field delimiter, one character only, defaults to `options.from.delimiter` which is a comma.
91 * `quote` Defaults to the quote read option.
92 * `quoted` Boolean, defaults to false; quote all fields even if not required.
93 * `escape` Defaults to the escape read option.
94 * `columns` List of fields, applied when `transform` returns an object, order matters, see the transform and the columns sections below.
95 * `header` Display the column names on the first line if the columns option is provided.
96 * `lineBreaks` String used to delimit record rows or a special value; special values are 'auto', 'unix', 'mac', 'windows', 'unicode'; defaults to 'auto' (discovered in source or 'unix' if no source is specified).
97 * `flags` Defaults to 'w'; 'w' creates or overwrites a file, 'a' appends to a file. Applied when using the `to.path` method.
98 * `newColumns` If the `columns` option is not specified (which means columns will be taken from the reader options), automatically append new columns if they are added during `transform()`.
99 * `end` Prevents calling `end` on the destination when set to false, so that the destination remains writable, similar to passing the `{end: false}` option to `stream.pipe()`.
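
A minimal sketch quoting every field and forcing Windows line breaks (the output path is hypothetical):

    csv()
      .from('1,2,3\na,b,c')
      .to.path('/tmp/out.csv', {quoted: true, lineBreaks: 'windows'})
      .on('close', function(){
        console.log('done');
      });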
100 */
101
10278 to.options = function(options) {
103116 if (options != null) {
10419 utils.merge(csv.options.to, options);
10519 return csv;
106 } else {
10797 return csv.options.to;
108 }
109 };
110 /*
111
112 `to.string(callback, [options])`
113 ------------------------------
114
115 Provide the output string to a callback.
116
117 csv()
118 .from( '"1","2","3","4"\n"a","b","c","d"' )
119 .to( function(data, count){} )
120
121 The callback is called with 2 arguments:
122 * data Stringified CSV string
123 * count Number of stringified records
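
A minimal sketch using both callback arguments:

    csv()
      .from('"1","2"\n"a","b"')
      .to.string(function(data, count){
        console.log(count + ' records:\n' + data);
      });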
124 */
125
12678 to.string = function(callback, options) {
12713 var data, stream;
12813 this.options(options);
12913 data = '';
13013 stream = new Stream;
13113 stream.writable = true;
13213 stream.write = function(d) {
13326 data += d;
13426 return true;
135 };
13613 stream.end = function() {
13713 return callback(data, csv.state.countWriten);
138 };
13913 csv.pipe(stream);
14013 return csv;
141 };
142 /*
143
144 `to.stream(stream, [options])`
145 ------------------------------
146
147 Write to a stream. Take a writable stream as first argument and
148 optionally an object of options as a second argument.
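
A minimal sketch writing to an arbitrary writable stream (the destination path is hypothetical):

    var fs = require('fs');
    var csv = require('csv');
    csv()
      .from('1,2,3\na,b,c')
      .to.stream(fs.createWriteStream('/tmp/out.csv'), {lineBreaks: 'unix'});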
149 */
150
15178 to.stream = function(stream, options) {
15252 this.options(options);
15352 switch (csv.options.to.lineBreaks) {
154 case 'auto':
1550 csv.options.to.lineBreaks = null;
1560 break;
157 case 'unix':
1582 csv.options.to.lineBreaks = "\n";
1592 break;
160 case 'mac':
1611 csv.options.to.lineBreaks = "\r";
1621 break;
163 case 'windows':
1641 csv.options.to.lineBreaks = "\r\n";
1651 break;
166 case 'unicode':
1671 csv.options.to.lineBreaks = "\u2028";
168 }
16952 csv.pipe(stream);
17052 stream.on('error', function(e) {
1710 return csv.error(e);
172 });
17352 stream.on('close', function() {
17447 return csv.emit('close', csv.state.count);
175 });
17652 return csv;
177 };
178 /*
179
180 `to.path(path, [options])`
181 --------------------------
182
183 Write to a path. Take a file path as first argument and optionally an object of
184 options as a second argument. The `close` event is sent after the file is written.
185 Relying on the `end` event is incorrect because it is sent when parsing is done
186 but before the file is written.
187 */
188
18978 to.path = function(path, options) {
19051 var stream;
19151 this.options(options);
19251 options = utils.merge({}, csv.options.to);
19351 delete options.end;
19451 stream = fs.createWriteStream(path, options);
19551 csv.to.stream(stream, null);
19651 return csv;
197 };
19878 return to;
199};

stringifier.js

98% (67 executable lines, 66 hit, 1 missed)

Line | Hits | Source
1// Generated by CoffeeScript 1.3.3
2/*
3
4Stringifier
5===========
6
7Convert an array or an object into a CSV line. For example, with the default options, the array `['a,b', 'c']` is stringified as `"a,b",c`.
8*/
9
101var Stringifier;
11
121Stringifier = function(csv) {
1378 this.csv = csv;
1478 return this;
15};
16
17/*
18Write a line to the destination stream. The line may be an object, an array or a string.
19The `preserve` argument is for lines which are not considered CSV data.
20*/
21
22
231Stringifier.prototype.write = function(line, preserve) {
2430491 if (typeof line === 'undefined' || line === null) {
259 return;
26 }
2730482 if (!preserve) {
2830475 try {
2930475 this.csv.emit('record', line, this.csv.state.count - 1);
30 } catch (e) {
311 return this.csv.error(e);
32 }
3330474 line = this.csv.stringifier.stringify(line);
34 }
3530481 this.csv.emit('data', line);
3630481 if (!preserve) {
3730474 this.csv.state.countWriten++;
38 }
3930481 return true;
40};
41
421Stringifier.prototype.stringify = function(line) {
4330474 var column, columns, containsLinebreak, containsQuote, containsdelimiter, delimiter, escape, field, i, newLine, quote, regexp, _i, _j, _line, _ref, _ref1;
4430474 columns = this.csv.options.to.columns || this.csv.options.from.columns;
4530474 if (typeof columns === 'object' && columns !== null && !Array.isArray(columns)) {
468 columns = Object.keys(columns);
47 }
4830474 delimiter = this.csv.options.to.delimiter || this.csv.options.from.delimiter;
4930474 quote = this.csv.options.to.quote || this.csv.options.from.quote;
5030474 escape = this.csv.options.to.escape || this.csv.options.from.escape;
5130474 if (typeof line === 'object') {
5229469 if (!Array.isArray(line)) {
531029 _line = [];
541029 if (columns) {
551023 for (i = _i = 0, _ref = columns.length; 0 <= _ref ? _i < _ref : _i > _ref; i = 0 <= _ref ? ++_i : --_i) {
563073 column = columns[i];
573073 _line[i] = typeof line[column] === 'undefined' || line[column] === null ? '' : line[column];
58 }
59 } else {
606 for (column in line) {
6114 _line.push(line[column]);
62 }
63 }
641029 line = _line;
651029 _line = null;
6628440 } else if (columns) {
6711 line.splice(columns.length);
68 }
6929469 if (Array.isArray(line)) {
7029469 newLine = this.csv.state.countWriten ? this.csv.options.to.lineBreaks || "\n" : '';
7129469 for (i = _j = 0, _ref1 = line.length; 0 <= _ref1 ? _j < _ref1 : _j > _ref1; i = 0 <= _ref1 ? ++_j : --_j) {
72220181 field = line[i];
73220181 if (typeof field === 'string') {
74
752027 } else if (typeof field === 'number') {
762017 field = '' + field;
7710 } else if (typeof field === 'boolean') {
784 field = field ? '1' : '';
796 } else if (field instanceof Date) {
800 field = '' + field.getTime();
81 }
82220181 if (field) {
83220151 containsdelimiter = field.indexOf(delimiter) >= 0;
84220151 containsQuote = field.indexOf(quote) >= 0;
85220151 containsLinebreak = field.indexOf("\r") >= 0 || field.indexOf("\n") >= 0;
86220151 if (containsQuote) {
873021 regexp = new RegExp(quote, 'g');
883021 field = field.replace(regexp, escape + quote);
89 }
90220151 if (containsQuote || containsdelimiter || containsLinebreak || this.csv.options.to.quoted) {
913035 field = quote + field + quote;
92 }
93220151 newLine += field;
94 }
95220181 if (i !== line.length - 1) {
96190712 newLine += delimiter;
97 }
98 }
9929469 line = newLine;
100 }
1011005 } else if (typeof line === 'number') {
1021003 line = '' + line;
103 }
10430474 return line;
105};
106
1071module.exports = function(csv) {
10878 return new Stringifier(csv);
109};
110
1111module.exports.Stringifier = Stringifier;

parser.js

96% (96 executable lines, 93 hit, 3 missed)

Line | Hits | Source
1// Generated by CoffeeScript 1.3.3
21var EventEmitter, Parser;
3
41EventEmitter = require('events').EventEmitter;
5
6/*
7
8Parsing
9=======
10
11The library extends the [EventEmitter][event] and emits the following events:
12
13* *row*
14 Emitted by the parser on each line with the line content as an array of fields.
15* *end*
16 Emitted when no more data will be parsed.
17* *error*
18 Emitted when an error occurs.
19*/
20
21
221Parser = function(csv) {
2378 this.csv = csv;
2478 this.options = csv.options.from;
2578 this.state = csv.state;
2678 this.quoted = false;
2778 this.commented = false;
2878 this.lines = 0;
2978 return this;
30};
31
321Parser.prototype.__proto__ = EventEmitter.prototype;
33
34/*
35
36`parse(chars)`
37--------------
38
39Parse a string which may hold multiple lines.
40The private state object is enriched on each character until a complete
41line is found and emitted as a `row` event.
42*/
43
44
451Parser.prototype.parse = function(chars) {
4626420 var c, csv, escapeIsQuote, i, isEscape, isQuote, isReallyEscaped, l, nextChar;
4726420 csv = this.csv;
4826420 chars = '' + chars;
4926420 l = chars.length;
5026420 i = 0;
5126420 if (this.lines === 0 && csv.options.from.encoding === 'utf8' && 0xFEFF === chars.charCodeAt(0)) {
521 i++;
53 }
5426420 while (i < l) {
552020024 c = chars.charAt(i);
562020024 switch (c) {
57 case this.options.escape:
58 case this.options.quote:
593198 if (this.commented) {
600 break;
61 }
623198 isReallyEscaped = false;
633198 if (c === this.options.escape) {
643194 nextChar = chars.charAt(i + 1);
653194 escapeIsQuote = this.options.escape === this.options.quote;
663194 isEscape = nextChar === this.options.escape;
673194 isQuote = nextChar === this.options.quote;
683194 if (!(escapeIsQuote && !this.state.field && !this.quoted) && (isEscape || isQuote)) {
691012 i++;
701012 isReallyEscaped = true;
711012 c = chars.charAt(i);
721012 this.state.field += c;
73 }
74 }
753198 if (!isReallyEscaped && c === this.options.quote) {
762186 if (this.state.field && !this.quoted) {
773 this.state.field += c;
783 break;
79 }
802183 if (this.quoted) {
811091 nextChar = chars.charAt(i + 1);
821091 if (nextChar && nextChar !== '\r' && nextChar !== '\n' && nextChar !== this.options.delimiter) {
832 return this.error(new Error("Invalid closing quote at line " + (this.lines + 1) + "; found " + (JSON.stringify(nextChar)) + " instead of delimiter " + (JSON.stringify(this.options.delimiter))));
84 }
851089 this.quoted = false;
861092 } else if (this.state.field === '') {
871092 this.quoted = true;
88 }
89 }
903193 break;
91 case this.options.delimiter:
92186756 if (this.commented) {
930 break;
94 }
95186756 if (this.quoted) {
969 this.state.field += c;
97 } else {
98186747 if (this.options.trim || this.options.rtrim) {
9930 this.state.field = this.state.field.trimRight();
100 }
101186747 this.state.line.push(this.state.field);
102186747 this.state.field = '';
103 }
104186756 break;
105 case '\n':
106 case '\r':
10727445 if (this.quoted) {
1085 this.state.field += c;
1095 break;
110 }
11127440 if (!this.options.quoted && this.state.lastC === '\r') {
11214 break;
113 }
11427426 this.lines++;
11527426 if (csv.options.to.lineBreaks === null) {
11646 csv.options.to.lineBreaks = c + (c === '\r' && chars.charAt(i + 1) === '\n' ? '\n' : '');
117 }
11827426 if (this.options.trim || this.options.rtrim) {
1195 this.state.field = this.state.field.trimRight();
120 }
12127426 this.state.line.push(this.state.field);
12227426 this.state.field = '';
12327426 this.emit('row', this.state.line);
12427426 this.state.line = [];
12527426 break;
126 case ' ':
127 case '\t':
1281123 if (this.quoted || (!this.options.trim && !this.options.ltrim) || this.state.field) {
1291082 this.state.field += c;
1301082 break;
131 }
13241 break;
133 default:
1341801502 if (this.commented) {
1350 break;
136 }
1371801502 this.state.field += c;
138 }
1392020022 this.state.lastC = c;
1402020022 i++;
141 }
142};
143
1441Parser.prototype.end = function() {
14574 if (this.quoted) {
1461 return this.error(new Error("Quoted field not terminated at line " + (this.lines + 1)));
147 }
14873 if (this.state.field || this.state.lastC === this.options.delimiter || this.state.lastC === this.options.quote) {
14924 if (this.options.trim || this.options.rtrim) {
1501 this.state.field = this.state.field.trimRight();
151 }
15224 this.state.line.push(this.state.field);
15324 this.state.field = '';
154 }
15573 if (this.state.line.length > 0) {
15624 this.emit('row', this.state.line);
157 }
15873 return this.emit('end', null);
159};
160
1611Parser.prototype.error = function(e) {
1623 return this.emit('error', e);
163};
164
1651module.exports = function(csv) {
16678 return new Parser(csv);
167};
168
1691module.exports.Parser = Parser;
170
171/*
172[event]: http://nodejs.org/api/events.html
173*/
174

transformer.js

98% (75 executable lines, 74 hit, 1 missed)

Line | Hits | Source
1// Generated by CoffeeScript 1.3.3
21var Transformer, stream;
3
41stream = require('stream');
5
6/*
7Transforming data
8=================
9
10Transformation may occur synchronously or asynchronously depending
11on the provided transform callback and its declared arguments length.
12
13The callback is called for each line and its arguments are:
14
15* *data*
16 CSV record
17* *index*
18 Incremented counter
19* *callback*
20 Callback function to be called in asynchronous mode
21
22Unless you specify the `columns` read option, `data` is provided
23as an array; otherwise it is an object with keys matching column
24names.
25
26In synchronous mode, the contract is quite simple, you receive an array
27of fields for each record and return the transformed record.
28
29In asynchronous mode, it is your responsibility to call the callback
30provided as the third argument. It must be called with two arguments,
31the first one is an error if any, the second is the transformed record.
32
33Transformed records may be an array, an associative array, a
34string or null. If null, the record will simply be skipped. When the
35returned value is an array, the fields are merged in order.
36When the returned value is an object, it will search for
37the `columns` property in the write or in the read options and
38smartly order the values. If no `columns` options are found,
39it will merge the values in their order of appearance. When the
40returned value is a string, it is directly sent to the destination
41and it is your responsibility to delimit, quote, escape
42or define line breaks.
43
44Transform callback run synchronously:
45
46 csv()
47 .from('82,Preisner,Zbigniew\n94,Gainsbourg,Serge')
48 .to(console.log)
49 .transform(function(data, index){
50 return data.reverse()
51 });
52 // Executing `node samples/transform.js` prints:
53 // 94,Gainsbourg,Serge\n82,Preisner,Zbigniew
54
55Transform callback run asynchronously:
56
57 csv()
58 .from('82,Preisner,Zbigniew\n94,Gainsbourg,Serge')
59 .to(console.log)
60 .transform(function(data, index, callback){
61 process.nextTick(function(){
62 callback(null, data.reverse());
63 });
64 });
65 // Executing `node samples/transform.js` prints:
66 // 94,Gainsbourg,Serge\n82,Preisner,Zbigniew
67
68Transform callback returning a string:
69
70 csv()
71 .from('82,Preisner,Zbigniew\n94,Gainsbourg,Serge')
72 .to(console.log)
73 .transform(function(data, index){
74 return (index>0 ? ',' : '') + data[0] + ":" + data[2] + ' ' + data[1];
75 });
76 // Executing `node samples/transform.js` prints:
77 // 82:Zbigniew Preisner,94:Serge Gainsbourg
78*/
79
80
811Transformer = function(csv) {
8278 this.csv = csv;
8378 return this;
84};
85
861Transformer.prototype.__proto__ = stream.prototype;
87
88/* no doc
89
90`transformer(csv).transform(line)`
91----------------------------------
92
93Call a callback to transform a line. Called from the `parse` function on each
94line. It is responsible for transforming the data and finally calling `write`.
95*/
96
97
981Transformer.prototype.transform = function(line) {
9930482 var column, columns, csv, done, finish, i, lineAsObject, sync, _i, _j, _len, _len1;
10030482 csv = this.csv;
10130482 columns = csv.options.from.columns;
10230482 if (columns) {
10323 if (typeof columns === 'object' && columns !== null && !Array.isArray(columns)) {
1042 columns = Object.keys(columns);
105 }
10623 if (csv.state.count === 0 && columns === true) {
1076 csv.options.from.columns = line;
1086 return;
109 }
11017 if (Array.isArray(line)) {
11111 lineAsObject = {};
11211 for (i = _i = 0, _len = columns.length; _i < _len; i = ++_i) {
11358 column = columns[i];
11458 lineAsObject[column] = line[i] || null;
115 }
11611 line = lineAsObject;
117 } else {
1186 lineAsObject = {};
1196 for (i = _j = 0, _len1 = columns.length; _j < _len1; i = ++_j) {
12012 column = columns[i];
12112 lineAsObject[column] = line[column] || null;
122 }
1236 line = lineAsObject;
124 }
125 }
12630476 finish = (function(line) {
12730475 var k, v;
12830475 if (csv.state.count === 1 && csv.options.to.header === true) {
1297 columns = csv.options.to.columns || csv.options.from.columns;
1307 if (typeof columns === 'object') {
1317 columns = (function() {
1327 var _results;
1337 _results = [];
1347 for (k in columns) {
13519 v = columns[k];
13619 _results.push(v);
137 }
1387 return _results;
139 })();
140 }
1417 csv.stringifier.write(columns);
142 }
14330475 csv.stringifier.write(line);
14430475 if (csv.state.transforming === 0 && this.closed === true) {
1453 return this.emit('end', csv.state.count);
146 }
147 }).bind(this);
14830476 csv.state.count++;
14930476 if (this.callback) {
1501124 sync = this.callback.length !== 3;
1511124 csv.state.transforming++;
1521124 done = function(err, line) {
1531124 var isObject;
1541124 if (err) {
1551 return csv.error(err);
156 }
1571123 isObject = typeof line === 'object' && !Array.isArray(line);
1581123 if (csv.options.to.newColumns && !csv.options.to.columns && isObject) {
1592 Object.keys(line).filter(function(column) {
16014 return columns.indexOf(column) === -1;
161 }).forEach(function(column) {
1621 return columns.push(column);
163 });
164 }
1651123 csv.state.transforming--;
1661123 return finish(line);
167 };
1681124 if (sync) {
1691116 try {
1701116 return done(null, this.callback(line, csv.state.count - 1));
171 } catch (err) {
1721 return done(err);
173 }
174 } else {
1758 try {
1768 return this.callback(line, csv.state.count - 1, function(err, line) {
1778 return done(err, line);
178 });
179 } catch (_error) {}
180 }
181 } else {
18229352 return finish(line);
183 }
184};
185
186/* no doc
187`transformer(csv).end()`
188------------------------
189
190A transformer instance extends the EventEmitter and
191emit the 'end' event when the last callback is called.
192*/
193
194
1951Transformer.prototype.end = function() {
19673 if (this.closed) {
1970 return this.csv.error(new Error('Transformer already closed'));
198 }
19973 this.closed = true;
20073 if (this.csv.state.transforming === 0) {
20170 return this.emit('end');
202 }
203};
204
2051module.exports = function(csv) {
20678 return new Transformer(csv);
207};
208
2091module.exports.Transformer = Transformer;

generator.js

97% (43 executable lines, 42 hit, 1 missed)

Line | Hits | Source
1// Generated by CoffeeScript 1.3.3
21var Generator, Stream, util;
3
41Stream = require('stream');
5
61util = require('util');
7
8/*
9
10`generator([options])`: Generate random CSV data
11================================================
12
13This function is provided for convenience in case you need to generate random CSV data.
14
15Note, it is quite simple at the moment; more functionality could come later. The code
16originates from "./samples/perf.coffee" and was later extracted in case others need
17its functionality.
18
19Options may include
20
21* duration Period to run in milliseconds, defaults to 4 minutes.
22* nb_columns Number of fields per record
23* max_word_length Maximum number of characters per word
24* start Start the generation on the next tick; otherwise you must call `resume`
25
26Starting a generation
27
28 var csv = require('csv');
29 var generator = csv.generator;
30 generator({start: true}).pipe(csv().to.path(__dirname + "/perf.out"));
31*/
32
33
341Generator = function(options) {
352 var _base, _base1, _ref, _ref1;
362 this.options = options != null ? options : {};
372 if ((_ref = (_base = this.options).duration) == null) {
380 _base.duration = 4 * 60 * 1000;
39 }
402 this.options.nb_columns = 8;
412 if ((_ref1 = (_base1 = this.options).max_word_length) == null) {
422 _base1.max_word_length = 16;
43 }
442 this.start = Date.now();
452 this.end = this.start + this.options.duration;
462 this.readable = true;
472 if (this.options.start) {
482 process.nextTick(this.resume.bind(this));
49 }
502 return this;
51};
52
531Generator.prototype.__proto__ = Stream.prototype;
54
551Generator.prototype.resume = function() {
5615312 var char, column, line, nb_chars, nb_words, _i, _j, _ref, _ref1;
5715312 this.paused = false;
5815312 while (!this.paused && this.readable) {
5926288 if (Date.now() > this.end) {
602 return this.destroy();
61 }
6226286 line = [];
6326286 for (nb_words = _i = 0, _ref = this.options.nb_columns; 0 <= _ref ? _i < _ref : _i > _ref; nb_words = 0 <= _ref ? ++_i : --_i) {
64210288 column = [];
65210288 for (nb_chars = _j = 0, _ref1 = Math.ceil(Math.random() * this.options.max_word_length); 0 <= _ref1 ? _j < _ref1 : _j > _ref1; nb_chars = 0 <= _ref1 ? ++_j : --_j) {
661785881 char = Math.floor(Math.random() * 32);
671785881 column.push(String.fromCharCode(char + (char < 16 ? 65 : 97 - 16)));
68 }
69210288 line.push(column.join(''));
70 }
7126286 this.emit('data', new Buffer("" + (line.join(',')) + "\n", this.options.encoding));
72 }
73};
74
751Generator.prototype.pause = function() {
7615310 return this.paused = true;
77};
78
791Generator.prototype.destroy = function() {
802 this.readable = false;
812 this.emit('end');
822 return this.emit('close');
83};
84
85/*
86`setEncoding([encoding])`
87
88Makes the 'data' event emit a string instead of a Buffer.
89encoding can be 'utf8', 'utf16le' ('ucs2'), 'ascii', or
90'hex'. Defaults to 'utf8'.
91*/
92
93
941Generator.prototype.setEncoding = function(encoding) {
951 return this.options.encoding = encoding;
96};
97
981module.exports = function(options) {
992 return new Generator(options);
100};
101
1021module.exports.Generator = Generator;