This repository has been archived by the owner on Feb 9, 2025. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathtests.js
115 lines (101 loc) · 2.98 KB
/
tests.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
// Dependencies: tape as the test harness, and the tokenizer factory
// under test from this package's entry point.
// (Fixed: `tape` was previously required twice on consecutive lines.)
var test = require('tape');
var tokenizer = require('./index');
// Test harness: feeds `writes` (an array of string chunks, consumed
// destructively via shift) into a fresh tokenizer stream, one chunk per
// process tick, then ends the stream. Collects every emitted token as a
// [type, src, line, col] tuple and calls `callback(undefined, tokens)`
// when the stream emits 'end'.
var setup = function(writes, callback){
    var tokens = [];
    var token_stream = tokenizer();
    token_stream.addRule(/^[\s]+$/, 'whitespace');
    token_stream.addRule(/^"([^"\n]|\\")*"$/, 'string');
    token_stream.addRule(/^[^"0-9\s][^\s]*$/, 'symbol');
    token_stream.addRule(/^[-+]?[0-9]+\.?[0-9]*$/, 'number');
    token_stream.on('data', function(token){
        tokens.push([token.type, token.src, token.line, token.col]);
    });
    token_stream.on('end', function(){
        callback(undefined, tokens);
    });
    var nextWrite = function(){
        // Fixed: the previous `if(!write)` check would have ended the
        // stream prematurely on an empty-string chunk (falsy). Test for
        // exhaustion explicitly instead.
        if(writes.length === 0){
            process.nextTick(function(){
                token_stream.end();
            });
            return;
        }
        var write = writes.shift();
        process.nextTick(function(){
            token_stream.write(write);
            nextWrite();
        });
    };
    nextWrite();
};
// Returns a `setup` callback that asserts the token stream produced for
// the canonical input 'hello world\n "a string" 100.25\none2three':
// exactly nine tokens, each matched as a [type, src, line, col] tuple.
var assertsForTheHelloWorldString = function(t){
    var expected = [
        ['symbol'    , 'hello'     , 1,  1],
        ['whitespace', ' '         , 1,  6],
        ['symbol'    , 'world'     , 1,  7],
        ['whitespace', '\n '       , 1, 12],
        ['string'    , '"a string"', 2,  2],
        ['whitespace', ' '         , 2, 12],
        ['number'    , '100.25'    , 2, 14],
        ['whitespace', '\n'        , 2, 20],
        ['symbol'    , 'one2three' , 3,  1]
    ];
    return function(err, tokens){
        if(err) return t.end(err);
        expected.forEach(function(tuple, i){
            t.deepEquals(tokens[i], tuple);
        });
        t.equals(tokens.length, expected.length);
        t.end();
    };
};
// The entire input arrives as one write.
test("all in one chunk", function(t){
    var chunks = ['hello world\n "a string" 100.25\none2three'];
    setup(chunks, assertsForTheHelloWorldString(t));
});
// The input arrives split on whitespace-friendly boundaries.
test("broken up", function(t){
    var chunks = [
        'hello world\n',
        ' "a string" ',
        ' 100.25\n',
        'one2three'
    ];
    setup(chunks, assertsForTheHelloWorldString(t));
});
// Chunk boundaries fall mid-token (inside symbols, strings, and numbers)
// to exercise the tokenizer's buffering across writes.
test("broken up in inconvenient places", function(t){
    var chunks = [
        'he', 'llo', ' world\n ',
        '"a ', 'string', '" 100',
        '.', '25',
        '\none', '2', 'three'
    ];
    setup(chunks, assertsForTheHelloWorldString(t));
});
// Worst-case chunking: every character is its own write.
test("one char at a time", function(t){
    var chunks = 'hello world\n "a string" 100.25\none2three'.split('');
    setup(chunks, assertsForTheHelloWorldString(t));
});
// With only a whitespace rule registered, the input ' 10 01' tokenizes
// its leading space, then fails on '10' — the stream must emit 'error'
// (with position info attached under err.tokenizer2) rather than 'end'.
test("error on no match", function(t){
    var ts = tokenizer();
    ts.addRule(/^[\s]+$/, 'whitespace');
    ts.on('data', function(token){
        // Only the leading space can ever match.
        t.deepEquals(token, {type: 'whitespace', src: ' ', line: 1, col: 1});
    });
    ts.on('error', function(err){
        t.equals(String(err), 'Error: unable to tokenize');
        t.equals(err.tokenizer2.buffer, "10 01");
        t.equals(err.tokenizer2.line, 1);
        t.equals(err.tokenizer2.col, 2);
        t.end();
    });
    ts.on('end', function(){
        t.fail('should\'ve failed instead of ending');
    });
    process.nextTick(function(){
        ts.write(' 10 01');
        process.nextTick(function(){
            ts.end();
        });
    });
});