@@ -37,6 +37,8 @@ var Pattern = require('../core/pattern').Pattern;
 var TOKEN = {
   TAG_OPEN: 'TK_TAG_OPEN',
   TAG_CLOSE: 'TK_TAG_CLOSE',
+  CONTROL_FLOW_OPEN: 'TK_CONTROL_FLOW_OPEN',
+  CONTROL_FLOW_CLOSE: 'TK_CONTROL_FLOW_CLOSE',
   ATTRIBUTE: 'TK_ATTRIBUTE',
   EQUALS: 'TK_EQUALS',
   VALUE: 'TK_VALUE',
@@ -60,6 +62,7 @@ var Tokenizer = function(input_string, options) {
   var pattern_reader = new Pattern(this._input);

   this.__patterns = {
+    control_flow_open: templatable_reader.starting_with(/[@]/).until_after(/[{]/),
     word: templatable_reader.until(/[\n\r\t <]/),
     single_quote: templatable_reader.until_after(/'/),
     double_quote: templatable_reader.until_after(/"/),
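
Note on the new pattern: `control_flow_open` chains `starting_with(/[@]/)` with `until_after(/[{]/)`, so a single `read()` consumes everything from the leading `@` through the first `{`. A rough sketch of the intended behavior (the input string is hypothetical):

    // cursor positioned on '@' in "@if (user.isLoggedIn) { ... }"
    this.__patterns.control_flow_open.read();
    // => "@if (user.isLoggedIn) {"
    // the block body and the closing '}' are left for later tokens
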
@@ -97,14 +100,16 @@ Tokenizer.prototype._is_comment = function(current_token) { // jshint unused:false
 };

 Tokenizer.prototype._is_opening = function(current_token) {
-  return current_token.type === TOKEN.TAG_OPEN;
+  return current_token.type === TOKEN.TAG_OPEN || current_token.type === TOKEN.CONTROL_FLOW_OPEN;
 };

 Tokenizer.prototype._is_closing = function(current_token, open_token) {
-  return current_token.type === TOKEN.TAG_CLOSE &&
+  return (current_token.type === TOKEN.TAG_CLOSE &&
     (open_token && (
       ((current_token.text === '>' || current_token.text === '/>') && open_token.text[0] === '<') ||
-      (current_token.text === '}}' && open_token.text[0] === '{' && open_token.text[1] === '{')));
+      (current_token.text === '}}' && open_token.text[0] === '{' && open_token.text[1] === '{')))
+  ) || (current_token.type === TOKEN.CONTROL_FLOW_CLOSE &&
+    (current_token.text === '}' && open_token.text.endsWith('{')));
 };

 Tokenizer.prototype._reset = function() {
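
The widened `_is_closing` means a bare `}` token can now close a control-flow opener, matched by checking that the opener's text ends in `{`. A minimal illustration with simplified token objects (hypothetical values, not the real Token class):

    var open_token = { type: TOKEN.CONTROL_FLOW_OPEN, text: '@if (x) {' };
    var close_token = { type: TOKEN.CONTROL_FLOW_CLOSE, text: '}' };
    // _is_closing(close_token, open_token) returns true:
    // close_token.text === '}' and open_token.text.endsWith('{')
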
@@ -123,6 +128,7 @@ Tokenizer.prototype._get_next_token = function(previous_token, open_token) { // jshint unused:false
   token = token || this._read_open_handlebars(c, open_token);
   token = token || this._read_attribute(c, previous_token, open_token);
   token = token || this._read_close(c, open_token);
+  token = token || this._read_control_flows(c);
   token = token || this._read_raw_content(c, previous_token, open_token);
   token = token || this._read_content_word(c);
   token = token || this._read_comment_or_cdata(c);
@@ -189,7 +195,7 @@ Tokenizer.prototype._read_processing = function(c) { // jshint unused:false
 Tokenizer.prototype._read_open = function(c, open_token) {
   var resulting_string = null;
   var token = null;
-  if (!open_token) {
+  if (!open_token || open_token.type === TOKEN.CONTROL_FLOW_OPEN) {
     if (c === '<') {

       resulting_string = this._input.next();
@@ -206,7 +212,7 @@ Tokenizer.prototype._read_open = function(c, open_token) {
 Tokenizer.prototype._read_open_handlebars = function(c, open_token) {
   var resulting_string = null;
   var token = null;
-  if (!open_token) {
+  if (!open_token || open_token.type === TOKEN.CONTROL_FLOW_OPEN) {
     if (this._options.indent_handlebars && c === '{' && this._input.peek(1) === '{') {
       if (this._input.peek(2) === '!') {
         resulting_string = this.__patterns.handlebars_comment.read();
@@ -221,11 +227,24 @@ Tokenizer.prototype._read_open_handlebars = function(c, open_token) {
   return token;
 };

+Tokenizer.prototype._read_control_flows = function(c) {
+  var resulting_string = null;
+  var token = null;
+  if (c === '@' && /[a-zA-Z0-9]/.test(this._input.peek(1))) {
+    resulting_string = this.__patterns.control_flow_open.read();
+    token = this._create_token(TOKEN.CONTROL_FLOW_OPEN, resulting_string);
+  } else if (c === '}' && this._input.peek(1) !== '}' && this._input.peek(-1) !== '}') {
+    resulting_string = this._input.next();
+    token = this._create_token(TOKEN.CONTROL_FLOW_CLOSE, resulting_string);
+  }
+  return token;
+};
+

 Tokenizer.prototype._read_close = function(c, open_token) {
   var resulting_string = null;
   var token = null;
-  if (open_token) {
+  if (open_token && open_token.type === TOKEN.TAG_OPEN) {
     if (open_token.text[0] === '<' && (c === '>' || (c === '/' && this._input.peek(1) === '>'))) {
       resulting_string = this._input.next();
       if (c === '/') { // for close tag "/>"
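
Putting it together, control-flow blocks should tokenize as open/close pairs around ordinary tag tokens. A hypothetical input and the token types it would produce (names from TOKEN above):

    var input =
      '@if (user) {\n' +
      '  <span>Welcome back</span>\n' +
      '} @else {\n' +
      '  <a href="/login">Log in</a>\n' +
      '}';
    // '@if (user) {' and '@else {'  -> TK_CONTROL_FLOW_OPEN
    // '<span>' / '</span>' etc.     -> TK_TAG_OPEN / TK_TAG_CLOSE as before
    // each bare '}'                 -> TK_CONTROL_FLOW_CLOSE
    // (the peek(1)/peek(-1) guards in _read_control_flows keep a
    //  handlebars '}}' from being misread as a control-flow close)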