Commit 096a4ef

Merge pull request #27 from ratel-rust/scientific_numbers

Adding support for scientific numbers. Closes #12 🎉

2 parents: 54498dd + f796146

2 files changed: +46 −1

core/src/tokenizer.rs

Lines changed: 35 additions & 1 deletion
```diff
@@ -690,6 +690,10 @@ define_handlers! {
 
                 return Ok(Literal(tok.read_float(start)));
             },
+            b'e' | b'E' => {
+                tok.bump();
+                return Ok(Literal(tok.read_scientific(start)));
+            }
             _ => break,
         }
     }
@@ -715,6 +719,10 @@ define_handlers! {
 
                 return Ok(Literal(tok.read_float(start)));
             },
+            b'e' | b'E' => {
+                tok.bump();
+                return Ok(Literal(tok.read_scientific(start)));
+            },
             _ => break,
         }
     }
@@ -1054,7 +1062,6 @@ impl<'a> Tokenizer<'a> {
 
     #[inline]
     fn read_octal(&mut self, start: usize) -> Value {
-
         while !self.is_eof() {
             match self.read_byte() {
                 b'0'...b'7' => self.bump(),
@@ -1081,6 +1088,32 @@ impl<'a> Tokenizer<'a> {
 
     #[inline]
     fn read_float(&mut self, start: usize) -> Value {
+        while !self.is_eof() {
+            let ch = self.read_byte();
+            match ch {
+                b'0'...b'9' => self.bump(),
+                b'e' | b'E' => {
+                    self.bump();
+                    return self.read_scientific(start);
+                },
+                _ => break
+            }
+        }
+
+        let value = self.slice_source(start, self.index);
+
+        Value::Number(value)
+    }
+
+    #[inline]
+    fn read_scientific(&mut self, start: usize) -> Value {
+        if !self.is_eof() {
+            match self.read_byte() {
+                b'-' | b'+' => self.bump(),
+                _ => {}
+            }
+        }
+
         while !self.is_eof() {
             let ch = self.read_byte();
             match ch {
@@ -1093,4 +1126,5 @@ impl<'a> Tokenizer<'a> {
 
         Value::Number(value)
     }
+
 }
```
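The shape of the change: the digit handlers and `read_float` now hand off to a new `read_scientific` when they hit an `e`/`E`; `read_scientific` accepts at most one leading `+`/`-`, then consumes the exponent digits, and the token keeps the raw source slice (via `slice_source`) rather than a parsed float. Below is a minimal, self-contained sketch of that scanning strategy, for illustration only; `scan_number` is a hypothetical stand-in for the tokenizer's `read_byte`/`bump`/`is_eof` byte cursor, not ratel's actual API.

```rust
/// Hypothetical sketch of the commit's scanning strategy: scan a numeric
/// literal starting at `start`, including an optional fractional part and
/// an optional `e`/`E` exponent with at most one sign.
fn scan_number(src: &str, start: usize) -> &str {
    let bytes = src.as_bytes();
    let mut i = start;

    // Integer digits.
    while i < bytes.len() && bytes[i].is_ascii_digit() {
        i += 1;
    }

    // Optional fraction: '.' followed by more digits.
    if i < bytes.len() && bytes[i] == b'.' {
        i += 1;
        while i < bytes.len() && bytes[i].is_ascii_digit() {
            i += 1;
        }
    }

    // Optional exponent: 'e'/'E', one optional sign, then digits --
    // this is the part the commit adds via read_scientific.
    if i < bytes.len() && (bytes[i] == b'e' || bytes[i] == b'E') {
        i += 1;
        if i < bytes.len() && (bytes[i] == b'+' || bytes[i] == b'-') {
            i += 1;
        }
        while i < bytes.len() && bytes[i].is_ascii_digit() {
            i += 1;
        }
    }

    // Like Value::Number, the token value is the raw source slice.
    &src[start..i]
}

fn main() {
    // Inputs drawn from the commit's new test cases; each should be
    // consumed whole as a single numeric token value.
    for input in ["0e-2", "0e2", "2e3", "2e-3", "2e+3", "0.2e3", "0.2e-3"] {
        assert_eq!(scan_number(input, 0), input);
    }
    println!("all scientific-number cases scanned intact");
}
```

Keeping the value as the raw slice defers numeric interpretation to later stages and lets the tokenizer stay zero-copy over the source text.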

core/tests/tokenizer.rs

Lines changed: 11 additions & 0 deletions
```diff
@@ -148,6 +148,17 @@ fn test_tokenizer_literals() {
     assert_token!("0O113", lit_num!("0O113"), "Value::Number");
 }
 
+#[test]
+fn test_scientifix_numbers() {
+    assert_token!("0e-2", Literal(Value::Number("0e-2".into())), "Value::Number");
+    assert_token!("0e2", Literal(Value::Number("0e2".into())), "Value::Number");
+    assert_token!("2e3", Literal(Value::Number("2e3".into())), "Value::Number");
+    assert_token!("2e-3", Literal(Value::Number("2e-3".into())), "Value::Number");
+    assert_token!("2e+3", Literal(Value::Number("2e+3".into())), "Value::Number");
+    assert_token!("0.2e3", Literal(Value::Number("0.2e3".into())), "Value::Number");
+    assert_token!("0.2e-3", Literal(Value::Number("0.2e-3".into())), "Value::Number");
+}
+
 #[test]
 fn test_tokenizer_reserved() {
     assert_token!("enum", Reserved(ReservedKind::Enum), "ReservedKind::Enum");
```
