
Commit 83af066

Adding support for scientific numbers

1 parent 54498dd · commit 83af066
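In short, per the diff below: the numeric token handlers in define_handlers! and the read_float method now hand off to a new read_scientific method when they encounter an e or E, and read_hexadec is reworked so that an e/E is treated as an exponent marker only when a - or + follows it (otherwise it remains an ordinary hex digit).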

2 files changed: 63 additions, 4 deletions


core/src/tokenizer.rs

Lines changed: 49 additions & 4 deletions
@@ -690,6 +690,10 @@ define_handlers! {
 
                 return Ok(Literal(tok.read_float(start)));
             },
+            b'e' | b'E' => {
+                tok.bump();
+                return Ok(Literal(tok.read_scientific(start)));
+            }
             _ => break,
         }
     }
@@ -715,6 +719,10 @@ define_handlers! {
 
                 return Ok(Literal(tok.read_float(start)));
             },
+            b'e' | b'E' => {
+                tok.bump();
+                return Ok(Literal(tok.read_scientific(start)));
+            },
             _ => break,
         }
     }
@@ -1054,7 +1062,6 @@ impl<'a> Tokenizer<'a> {
 
     #[inline]
     fn read_octal(&mut self, start: usize) -> Value {
-
         while !self.is_eof() {
             match self.read_byte() {
                 b'0'...b'7' => self.bump(),
@@ -1070,9 +1077,19 @@ impl<'a> Tokenizer<'a> {
         while !self.is_eof() {
             match self.read_byte() {
                 b'0'...b'9' => self.bump(),
-                b'a'...b'f' => self.bump(),
-                b'A'...b'F' => self.bump(),
-                _ => break
+                b'a'...b'd' => self.bump(),
+                b'A'...b'D' => self.bump(),
+                b'f' | b'F' => self.bump(),
+                b'e' | b'E' => {
+                    self.bump();
+                    match self.peek_byte() {
+                        b'-' | b'+' => {
+                            return self.read_scientific(start);
+                        },
+                        _ => {}
+                    }
+                },
+                _ => break
             };
         }
 
@@ -1081,6 +1098,33 @@ impl<'a> Tokenizer<'a> {
 
     #[inline]
     fn read_float(&mut self, start: usize) -> Value {
+        while !self.is_eof() {
+            let ch = self.read_byte();
+            match ch {
+                b'0'...b'9' => self.bump(),
+                b'e' | b'E' => {
+                    self.bump();
+                    return self.read_scientific(start);
+                },
+                _ => break
+            }
+        }
+
+        let value = self.slice_source(start, self.index);
+
+        Value::Number(value)
+    }
+
+    #[inline]
+    fn read_scientific(&mut self, start: usize) -> Value {
+        while !self.is_eof() {
+            let ch = self.read_byte();
+            match ch {
+                b'-' | b'+' => self.bump(),
+                _ => break
+            }
+        }
+
         while !self.is_eof() {
             let ch = self.read_byte();
             match ch {
@@ -1093,4 +1137,5 @@ impl<'a> Tokenizer<'a> {
 
         Value::Number(value)
     }
+
 }
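
To see the control flow in one place, here is a minimal standalone sketch of the same scanning idea, written against a plain byte slice rather than the crate's Tokenizer type. The names scan_number and scan_scientific are invented for illustration, it uses the current b'0'..=b'9' range syntax where the diff's older code uses b'0'...b'9', and it skips validation (repeated dots, missing exponent digits) that a real tokenizer would handle.

// Hypothetical standalone sketch, not the crate's actual API.
// Consumes the exponent part: an optional sign, then digits.
fn scan_scientific(bytes: &[u8], mut index: usize) -> usize {
    if let Some(&b) = bytes.get(index) {
        if b == b'-' || b == b'+' {
            index += 1;
        }
    }
    while let Some(&b) = bytes.get(index) {
        match b {
            b'0'..=b'9' => index += 1,
            _ => break,
        }
    }
    index
}

// Consumes a decimal literal starting at `start`; on `e`/`E` it hands
// off to scan_scientific, mirroring the read_scientific calls in the diff.
fn scan_number(bytes: &[u8], start: usize) -> &str {
    let mut index = start;
    while let Some(&b) = bytes.get(index) {
        match b {
            // No validation: a real tokenizer tracks whether a dot was seen.
            b'0'..=b'9' | b'.' => index += 1,
            b'e' | b'E' => {
                index = scan_scientific(bytes, index + 1);
                break;
            }
            _ => break,
        }
    }
    std::str::from_utf8(&bytes[start..index]).unwrap()
}

fn main() {
    assert_eq!(scan_number(b"2e+3;", 0), "2e+3");
    assert_eq!(scan_number(b"0.2e-3 ", 0), "0.2e-3");
}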

core/tests/tokenizer.rs

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -148,6 +148,20 @@ fn test_tokenizer_literals() {
     assert_token!("0O113", lit_num!("0O113"), "Value::Number");
 }
 
+#[test]
+fn test_scientific_numbers() {
+    assert_token!("0e-2", Literal(Value::Number("0e-2".into())), "Value::Number");
+    assert_token!("0e2", Literal(Value::Number("0e2".into())), "Value::Number");
+    assert_token!("2e3", Literal(Value::Number("2e3".into())), "Value::Number");
+    assert_token!("2e-3", Literal(Value::Number("2e-3".into())), "Value::Number");
+    assert_token!("2e+3", Literal(Value::Number("2e+3".into())), "Value::Number");
+    assert_token!("0.2e3", Literal(Value::Number("0.2e3".into())), "Value::Number");
+    assert_token!("0.2e-3", Literal(Value::Number("0.2e-3".into())), "Value::Number");
+    assert_token!("0x1e3", Literal(Value::Number("0x1e3".into())), "Value::Number");
+    assert_token!("0x21e3", Literal(Value::Number("0x21e3".into())), "Value::Number");
+    assert_token!("0x21e-3", Literal(Value::Number("0x21e-3".into())), "Value::Number");
+}
+
 #[test]
 fn test_tokenizer_reserved() {
     assert_token!("enum", Reserved(ReservedKind::Enum), "ReservedKind::Enum");
