Commit 90afe0a

Gergely Gyorgy Both (gergely-gyorgy-both) authored and committed

Fixed beautifier#2219 - formatting of new Angular control flow syntax

1 parent: 1df08a0

File tree: 2 files changed (+60, -6 lines)
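
For context, this commit targets Angular's built-in control flow blocks (@if, @for, @switch and their @else/@empty/@case companions), which the HTML beautifier previously did not recognize. Below is a minimal usage sketch through js-beautify's public html entry point; the output shown in the comment is an assumption based on the handlers added in this commit, not taken from its tests:

```js
// Sketch only; assumes a js-beautify build that includes this commit.
var beautify_html = require('js-beautify').html;

var input =
  '@if (user.isLoggedIn) {<span>Welcome back!</span>} @else {<a href="/login">Log in</a>}';

console.log(beautify_html(input, { indent_size: 2 }));
// Assumed shape of the result: each control flow token starts on its own line
// and the block contents are indented one extra level:
//
// @if (user.isLoggedIn) {
//   <span>Welcome back!</span>
// }
// @else {
//   <a href="/login">Log in</a>
// }
```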

js/src/html/beautifier.js (35 additions, 0 deletions)
```diff
@@ -111,6 +111,13 @@ Printer.prototype.indent = function() {
   this.indent_level++;
 };
 
+Printer.prototype.deindent = function() {
+  if (this.indent_level > 0 ) {
+    this.indent_level--;
+    this._output.set_indent(this.indent_level, this.alignment_size);
+  }
+};
+
 Printer.prototype.get_full_indent = function(level) {
   level = this.indent_level + (level || 0);
   if (level < 1) {
```
```diff
@@ -305,6 +312,10 @@ Beautifier.prototype.beautify = function() {
       parser_token = this._handle_tag_close(printer, raw_token, last_tag_token);
     } else if (raw_token.type === TOKEN.TEXT) {
       parser_token = this._handle_text(printer, raw_token, last_tag_token);
+    } else if (raw_token.type === TOKEN.CONTROL_FLOW_OPEN) {
+      parser_token = this._handle_control_flow_open(printer, raw_token);
+    } else if (raw_token.type === TOKEN.CONTROL_FLOW_CLOSE) {
+      parser_token = this._handle_control_flow_close(printer, raw_token);
     } else {
       // This should never happen, but if it does. Print the raw token
       printer.add_raw_token(raw_token);
```
```diff
@@ -319,6 +330,30 @@ Beautifier.prototype.beautify = function() {
   return sweet_code;
 };
 
+Beautifier.prototype._handle_control_flow_open = function(printer, raw_token) {
+  var parser_token = {
+    text: raw_token.text,
+    type: raw_token.type
+  };
+
+  printer.print_newline(true); // TODO: handle indentation based on brace_style (and preserve-inline)
+  printer.print_token(raw_token);
+  printer.indent();
+  return parser_token;
+};
+
+Beautifier.prototype._handle_control_flow_close = function(printer, raw_token) {
+  var parser_token = {
+    text: raw_token.text,
+    type: raw_token.type
+  };
+
+  printer.deindent();
+  printer.print_newline(true);
+  printer.print_token(raw_token);
+  return parser_token;
+};
+
 Beautifier.prototype._handle_tag_close = function(printer, raw_token, last_tag_token) {
   var parser_token = {
     text: raw_token.text,
```
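
As a rough illustration (not part of the commit), the two new handlers drive the Printer like this for a small @if block; the token texts assume the tokenizer changes shown below:

```js
// Illustrative trace only; the exact Printer output is an assumption.
//
// Input fragment:        @if (visible) { <span>Hi</span> }
//
// Token                  Handler                        Effect on the Printer
// '@if (visible) {'      _handle_control_flow_open      print_newline, print_token, indent()
// '<span>', 'Hi', ...    existing tag/text handlers     printed one level deeper
// '}'                    _handle_control_flow_close     deindent(), print_newline, print_token
```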

js/src/html/tokenizer.js (25 additions, 6 deletions)
```diff
@@ -37,6 +37,8 @@ var Pattern = require('../core/pattern').Pattern;
 var TOKEN = {
   TAG_OPEN: 'TK_TAG_OPEN',
   TAG_CLOSE: 'TK_TAG_CLOSE',
+  CONTROL_FLOW_OPEN: 'TK_CONTROL_FLOW_OPEN',
+  CONTROL_FLOW_CLOSE: 'TK_CONTROL_FLOW_CLOSE',
   ATTRIBUTE: 'TK_ATTRIBUTE',
   EQUALS: 'TK_EQUALS',
   VALUE: 'TK_VALUE',
```
```diff
@@ -60,6 +62,7 @@ var Tokenizer = function(input_string, options) {
   var pattern_reader = new Pattern(this._input);
 
   this.__patterns = {
+    control_flow_open: templatable_reader.starting_with(/[@]/).until_after(/[{]/),
     word: templatable_reader.until(/[\n\r\t <]/),
     single_quote: templatable_reader.until_after(/'/),
     double_quote: templatable_reader.until_after(/"/),
```
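
The new control_flow_open pattern starts at an '@' and reads up to and including the first '{', so a block header comes through as a single token. A rough stand-in with a plain regex (illustration only; the library's templatable Pattern is not a plain regex):

```js
// Approximation of starting_with(/[@]/).until_after(/[{]/) for illustration.
var control_flow_open = /@[^{]*\{/;

var sample = '@for (item of items; track item.id) { <li>...</li> }';
console.log(sample.match(control_flow_open)[0]);
// => '@for (item of items; track item.id) {'
// The list items and the closing '}' are left for the other token readers.
```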
```diff
@@ -97,14 +100,16 @@ Tokenizer.prototype._is_comment = function(current_token) { // jshint unused:false
 };
 
 Tokenizer.prototype._is_opening = function(current_token) {
-  return current_token.type === TOKEN.TAG_OPEN;
+  return current_token.type === TOKEN.TAG_OPEN || current_token.type === TOKEN.CONTROL_FLOW_OPEN;
 };
 
 Tokenizer.prototype._is_closing = function(current_token, open_token) {
-  return current_token.type === TOKEN.TAG_CLOSE &&
+  return (current_token.type === TOKEN.TAG_CLOSE &&
     (open_token && (
       ((current_token.text === '>' || current_token.text === '/>') && open_token.text[0] === '<') ||
-      (current_token.text === '}}' && open_token.text[0] === '{' && open_token.text[1] === '{')));
+      (current_token.text === '}}' && open_token.text[0] === '{' && open_token.text[1] === '{')))
+  ) || (current_token.type === TOKEN.CONTROL_FLOW_CLOSE &&
+    (current_token.text === '}' && open_token.text.endsWith('{')));
 };
 
 Tokenizer.prototype._reset = function() {
```
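
Concretely, an open token with text '@if (x) {' now satisfies _is_opening, and a later CONTROL_FLOW_CLOSE token '}' satisfies _is_closing because the open token's text ends with '{'. A standalone sketch of just that closing check (it mirrors the condition above but is not the library's code or tests):

```js
// Minimal sketch of the new CONTROL_FLOW_CLOSE branch of _is_closing.
function isControlFlowPair(open_text, close_text) {
  return close_text === '}' && open_text.endsWith('{');
}

console.log(isControlFlowPair('@if (x) {', '}'));        // true
console.log(isControlFlowPair('{{#each items}}', '}'));  // false; handlebars blocks still close with '}}'
```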
```diff
@@ -123,6 +128,7 @@ Tokenizer.prototype._get_next_token = function(previous_token, open_token) { // jshint unused:false
   token = token || this._read_open_handlebars(c, open_token);
   token = token || this._read_attribute(c, previous_token, open_token);
   token = token || this._read_close(c, open_token);
+  token = token || this._read_control_flows(c);
   token = token || this._read_raw_content(c, previous_token, open_token);
   token = token || this._read_content_word(c);
   token = token || this._read_comment_or_cdata(c);
```
```diff
@@ -189,7 +195,7 @@ Tokenizer.prototype._read_processing = function(c) { // jshint unused:false
 Tokenizer.prototype._read_open = function(c, open_token) {
   var resulting_string = null;
   var token = null;
-  if (!open_token) {
+  if (!open_token || open_token.type === TOKEN.CONTROL_FLOW_OPEN) {
     if (c === '<') {
 
       resulting_string = this._input.next();
```
```diff
@@ -206,7 +212,7 @@ Tokenizer.prototype._read_open = function(c, open_token) {
 Tokenizer.prototype._read_open_handlebars = function(c, open_token) {
   var resulting_string = null;
   var token = null;
-  if (!open_token) {
+  if (!open_token || open_token.type === TOKEN.CONTROL_FLOW_OPEN) {
     if (this._options.indent_handlebars && c === '{' && this._input.peek(1) === '{') {
       if (this._input.peek(2) === '!') {
         resulting_string = this.__patterns.handlebars_comment.read();
```
```diff
@@ -221,11 +227,24 @@ Tokenizer.prototype._read_open_handlebars = function(c, open_token) {
   return token;
 };
 
+Tokenizer.prototype._read_control_flows = function (c) {
+  var resulting_string = null;
+  var token = null;
+  if (c === '@' && /[a-zA-Z0-9]/.test(this._input.peek(1))) {
+    resulting_string = this.__patterns.control_flow_open.read();
+    token = this._create_token(TOKEN.CONTROL_FLOW_OPEN, resulting_string);
+  } else if (c === '}' && this._input.peek(1) !== '}' && this._input.peek(-1) !== '}') {
+    resulting_string = this._input.next();
+    token = this._create_token(TOKEN.CONTROL_FLOW_CLOSE, resulting_string);
+  }
+  return token;
+};
+
 
 Tokenizer.prototype._read_close = function(c, open_token) {
   var resulting_string = null;
   var token = null;
-  if (open_token) {
+  if (open_token && open_token.type === TOKEN.TAG_OPEN) {
     if (open_token.text[0] === '<' && (c === '>' || (c === '/' && this._input.peek(1) === '>'))) {
       resulting_string = this._input.next();
       if (c === '/') { // for close tag "/>"
```
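
Note that _read_control_flows only treats a lone '}' as a control flow close when neither the next nor the previous character is '}', which keeps handlebars' '}}' terminators on their existing code path. A standalone sketch of that guard on a plain string (illustration only; peek here is ordinary indexing, not the library's input scanner):

```js
// Sketch of the '}' vs '}}' guard used in _read_control_flows.
function isControlFlowClose(text, i) {
  return text[i] === '}' && text[i + 1] !== '}' && text[i - 1] !== '}';
}

var sample = '@if (x) { {{greeting}} }';
console.log(isControlFlowClose(sample, sample.length - 1));        // true: the block's own '}'
console.log(isControlFlowClose(sample, sample.indexOf('}}')));     // false: first '}' of '}}'
console.log(isControlFlowClose(sample, sample.indexOf('}}') + 1)); // false: second '}' of '}}'
```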
