Skip to content

Commit

Permalink
fix: Add more spec tests for the new tokenizer and fix issues in it
Browse files Browse the repository at this point in the history
  • Loading branch information
hangxingliu committed Apr 30, 2024
1 parent 7026fd1 commit 2a6fc8c
Show file tree
Hide file tree
Showing 15 changed files with 465 additions and 44 deletions.
65 changes: 65 additions & 0 deletions src/parser-v2/tests/mkosi-1.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
import { deepStrictEqual } from "assert";
import { tokenizer } from "../tokenizer.js";
import { TokenType } from "../types.js";

// Fixture: a small mkosi-style config exercising a section header, an
// "@"-prefixed key, a value containing "=", and an indented line (L4).
let exampleConf = [
  //
  //23456
  "[Host]", // L1
  "@Incremental=yes", // L2
  "KernelCommandLineExtra=systemd.crash_shell=yes", // L3
  " systemd.log_level=debug", // L4
].join("\n");

const repeat = (type: TokenType, times: number) => new Array<TokenType>(times).fill(type);

// Expected `forecast` for each prefix length of exampleConf: one entry per
// character position; line-leading positions forecast `none`.
const tokenTypes = [
  // L1
  TokenType.none,
  ...repeat(TokenType.section, "[Host]".length - 1),
  TokenType.unknown,
  // L2
  TokenType.none,
  ...repeat(TokenType.directiveKey, "@Incremental".length),
  ...repeat(TokenType.directiveValue, "yes".length),
  TokenType.directiveValue,
  // L3
  TokenType.none,
  ...repeat(TokenType.directiveKey, "KernelCommandLineExtra".length),
  ...repeat(TokenType.directiveValue, "systemd.crash_shell=yes".length),
  TokenType.directiveValue,
  // L4
  TokenType.none,
  ...repeat(TokenType.directiveValue, " systemd.log_level=debug".length),
];

// Tokenize every prefix of the fixture and check the forecast at the cut point.
for (let i = 0; i < tokenTypes.length; i++) {
  const tokenType = tokenTypes[i];
  const conf = exampleConf.slice(0, i);
  const result = tokenizer(conf, { mkosi: true });
  deepStrictEqual(
    result.forecast,
    tokenType,
    `exampleConf[${i}].tokenType == ${tokenType} (conf: ${JSON.stringify(conf)})`
  );

  // const result2 = tokenizer(exampleConf, { cursor: i, mkosi: true });
  // deepStrictEqual(result, result2, `exampleConf[${i}]`);
}

// Incremental re-tokenization via `prevTokens` must agree with a fresh run.
const prev = tokenizer(exampleConf, { mkosi: true });
exampleConf += "\n";
{
  const result = tokenizer(exampleConf, { mkosi: true });
  const result2 = tokenizer(exampleConf, { prevTokens: prev.tokens, mkosi: true });
  deepStrictEqual(result.forecast, TokenType.none);
  deepStrictEqual(result, result2);
}

// Appending a single space on the new line: mkosi mode forecasts a
// continued directive value at the cursor.
exampleConf += " ";
{
  const result = tokenizer(exampleConf, { mkosi: true });
  const result2 = tokenizer(exampleConf, { prevTokens: prev.tokens, mkosi: true });
  deepStrictEqual(result.forecast, TokenType.directiveValue);
  deepStrictEqual(result, result2);
}
45 changes: 45 additions & 0 deletions src/parser-v2/tests/mkosi-2.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
import { tokenizer } from "../tokenizer.js";
import { AssertTokens, test } from "./utils.js";

// One fixture tokenized twice below: once in mkosi mode, once in default
// (systemd) mode, to pin down how the indented line L4 differs between them.
const exampleConf = [
  //
  //23456
  "[Host]", // L1
  "@Incremental=yes", // L2
  "KernelCommandLineExtra=systemd.crash_shell=yes", // L3
  " systemd.log_level=debug", // L4
].join("\n");

// mkosi mode: the indented L4 yields another value token for the previous
// directive — no new key/assignment tokens appear.
test(exampleConf, ({ diagnosis }) => {
  const { tokens } = tokenizer(exampleConf, { mkosi: true });
  diagnosis(tokens);

  const assert = new AssertTokens(tokens);
  assert
    .section("[Host]")
    .key("@Incremental")
    .assignment()
    .value("yes")
    .key("KernelCommandLineExtra")
    .assignment()
    .value("systemd.crash_shell=yes")
    .value("systemd.log_level=debug");
});

// Default mode: the same L4 is tokenized as its own key/assignment/value.
test(exampleConf, ({ diagnosis }) => {
  const { tokens } = tokenizer(exampleConf, {});
  diagnosis(tokens);

  const assert = new AssertTokens(tokens);
  assert
    .section("[Host]")
    .key("@Incremental")
    .assignment()
    .value("yes")
    .key("KernelCommandLineExtra")
    .assignment()
    .value("systemd.crash_shell=yes")
    .key("systemd.log_level")
    .assignment()
    .value("debug");
});
25 changes: 25 additions & 0 deletions src/parser-v2/tests/mkosi-3.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
import { deepStrictEqual } from "assert";
import { tokenizer } from "../tokenizer.js";
import { test } from "./utils.js";
import { TokenType } from "../types.js";

// Trailing whitespace after a section header is emitted as an `unknown` token.
test("[Host] ", ({ conf, diagnosis, range }) => {
  const { tokens, forecast } = tokenizer(conf, { mkosi: true });
  diagnosis(tokens);
  deepStrictEqual(tokens, [
    { type: TokenType.section, range: range(0, 6), text: "[Host]" },
    { type: TokenType.unknown, range: range(6, 7), text: " " },
  ]);
  deepStrictEqual(forecast, TokenType.unknown);
});

// A "#" after the section header starts a comment token (and the forecast
// at the end of input is `comment`).
test("[Host] #", ({ conf, diagnosis, range }) => {
  const { tokens, forecast } = tokenizer(conf, { mkosi: true });
  diagnosis(tokens);
  deepStrictEqual(tokens, [
    { type: TokenType.section, range: range(0, 6), text: "[Host]" },
    { type: TokenType.unknown, range: range(6, 7), text: " " },
    { type: TokenType.comment, range: range(7, 8), text: "#" },
  ]);
  deepStrictEqual(forecast, TokenType.comment);
});
78 changes: 78 additions & 0 deletions src/parser-v2/tests/mkosi-real-file-1.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
// Smoke test: tokenize a real mkosi.conf sample from the repository and
// assert the complete token stream in document order.
import { readFileSync } from "fs";
import { resolve } from "path";
// NOTE(review): sibling spec files import with an explicit ".js" extension
// ("../tokenizer.js"); these extensionless specifiers only resolve under a
// CommonJS-style module resolution — confirm against tsconfig.
import { tokenizer } from "../tokenizer";
// Only referenced by the commented-out experiment at the bottom of this file.
import { getDirectivesFromTokens } from "../get-directive-keys";
import { AssertTokens } from "./utils";

// Vendored sample file checked into the repository.
const filePath = resolve(__dirname, "../../../test/samples/mkosi/mkosi/mkosi.conf");
const fileContent = readFileSync(filePath, "utf-8");
const result = tokenizer(fileContent, { mkosi: true });
console.log(`tokens.length = ${result.tokens.length}`);

// Expected tokens, in order. Comment tokens interleave with the multi-line
// "RemoveFiles" value, matching the sample file's layout.
new AssertTokens(result.tokens)
  .comment()
  .section("[Output]")
  .comment()
  .comment()
  .key("@Format")
  .assignment()
  .value("directory")
  .key("@CacheDirectory")
  .assignment()
  .value("mkosi.cache")
  .key("@OutputDirectory")
  .assignment()
  .value("mkosi.output")
  //
  .section("[Content]")
  .key("Autologin")
  .assignment()
  .value("yes")
  .key("@SELinuxRelabel")
  .assignment()
  .value("no")
  .key("@ShimBootloader")
  .assignment()
  .value("unsigned")
  .key("BuildSources")
  .assignment()
  .value(".")
  .key("BuildSourcesEphemeral")
  .assignment()
  .value("yes")
  //
  .key("Packages")
  .assignment()
  .value("attr")
  .value("ca-certificates")
  .value("gdb")
  .value("jq")
  .value("less")
  .value("nano")
  .value("strace")
  .value("tmux")
  //
  .key("InitrdPackages")
  .assignment()
  .value("less")
  //
  .key("RemoveFiles")
  .assignment()
  .comment()
  .value("/usr/lib/kernel/install.d/20-grub.install")
  .comment()
  .value("/usr/lib/kernel/install.d/50-dracut.install")
  .comment()
  //
  .key("KernelCommandLine")
  .assignment()
  .value("console=ttyS0 enforcing=0")
  //
  .section("[Host]")
  .key("@QemuMem")
  .assignment()
  .value("4G");

// console.log(result.tokens);
// const directives = getDirectivesFromTokens(result.tokens);
// console.log(directives);
1 change: 1 addition & 0 deletions src/parser-v2/tests/tokenizer-1.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -176,6 +176,7 @@ test("[A]U\nK=\\\n", ({ conf, diagnosis, range, loc }) => {
});

test("[A]U\nK=\\\n #", ({ conf, diagnosis, range, loc }) => {
process.env.is_debug = '1';
const result = tokenizer(conf);
diagnosis(result.tokens);
deepStrictEqual(result.tokens, [
Expand Down
14 changes: 14 additions & 0 deletions src/parser-v2/tests/tokenizer-2.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
import { deepStrictEqual } from "assert";
import { tokenizer } from "../tokenizer.js";
import { AssertTokens, test } from "./utils.js";
import { TokenType } from "../types.js";

// Incremental tokenization: feed the tokens of the prefix "Key = " back in
// as `prevTokens` and check the whole line still tokenizes the same way.
// Note the surrounding spaces are kept inside the key/value token texts.
test("Key = Value ", ({ conf, diagnosis }) => {
  const prev = tokenizer(conf.slice(0, 6));

  const { tokens, forecast } = tokenizer(conf, { prevTokens: prev.tokens });
  diagnosis(tokens);

  deepStrictEqual(forecast, TokenType.directiveValue);
  new AssertTokens(tokens).key("Key ").assignment().value("Value ");
});
20 changes: 20 additions & 0 deletions src/parser-v2/tests/tokenizer-3.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
import { deepStrictEqual } from "assert";
import { tokenizer } from "../tokenizer.js";
import { TokenType } from "../types.js";

// An unterminated section header ("[A") followed by an empty line must
// forecast `none` — both with and without a trailing backslash on L1.
const cases: string[][] = [
  //
  ["[A", ""], // plain unterminated header
  ["[A\\", ""], // unterminated header ending in a backslash
];

for (const lines of cases) {
  const { forecast } = tokenizer(lines.join("\n"));
  deepStrictEqual(forecast, TokenType.none);
}
41 changes: 41 additions & 0 deletions src/parser-v2/tests/tokenizer-4.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
import { deepStrictEqual } from "assert";
import { tokenizer } from "../tokenizer.js";
import { TokenType } from "../types.js";
import { AssertTokens, test } from "./utils.js";

// "=" edge cases: "===" becomes assignment + value "==", a lone "=" is an
// assignment, and the bare word after it is treated as the next key.
const exampleConf = [
  //
  "[A]", // L1
  "===", // L2
  "=", // L3
  "LOL", // L4
].join("\n");

test(exampleConf, ({ conf, diagnosis }) => {
  const result1 = tokenizer(conf);
  diagnosis(result1);

  new AssertTokens(result1.tokens).section("[A]").assignment().value("==").assignment().key("LOL");
  deepStrictEqual(result1.forecast, TokenType.directiveKey);
});

// Backslash continuation: " =C" on the next line stays a continued value.
test("A=\\\n =C", ({ conf, diagnosis }) => {
  const result1 = tokenizer(conf);
  diagnosis(result1);

  new AssertTokens(result1.tokens).key("A").assignment().value("\\").value(" =C");
  deepStrictEqual(result1.forecast, TokenType.directiveValue);
});

// Indented " =C" without a continuation backslash: mkosi mode emits another
// value token "=C"; default mode emits assignment + value "C" instead.
test("A=A\n =C", ({ conf, diagnosis }) => {
  const result1 = tokenizer(conf, { mkosi: true });
  diagnosis(result1);

  new AssertTokens(result1.tokens).key("A").assignment().value("A").value("=C");
  deepStrictEqual(result1.forecast, TokenType.directiveValue);

  const result2 = tokenizer(conf);
  diagnosis(result2);
  new AssertTokens(result2.tokens).key("A").assignment().value("A").assignment().value("C");
  deepStrictEqual(result2.forecast, TokenType.directiveValue);
});
19 changes: 19 additions & 0 deletions src/parser-v2/tests/tokenizer-5.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
import { deepStrictEqual } from "assert";
import { tokenizer } from "../tokenizer.js";
import { TokenType } from "../types.js";
import { AssertTokens, test } from "./utils.js";

// A "[V]" line directly after a backslash continuation: the default
// tokenizer keeps it as a continued value, while mkosi mode tokenizes it
// as a new section header (and forecasts `unknown` after it).
test("K=\\\n[V]", ({ conf, diagnosis }) => {
  const systemd = tokenizer(conf);
  diagnosis(systemd);
  new AssertTokens(systemd.tokens).key("K").assignment().value("\\").value("[V]");
  deepStrictEqual(systemd.forecast, TokenType.directiveValue);

  const mkosi = tokenizer(conf, { mkosi: true });
  diagnosis(mkosi);
  new AssertTokens(mkosi.tokens).key("K").assignment().value("\\").section("[V]");
  deepStrictEqual(mkosi.forecast, TokenType.unknown);
});
56 changes: 55 additions & 1 deletion src/parser-v2/tests/utils.ts
Original file line number Diff line number Diff line change
@@ -1,9 +1,30 @@
import { inspect } from "util";
import { LocationTuple, RangeTuple } from "../types.js";
import { LocationTuple, RangeTuple, Token, TokenType } from "../types.js";
import { deepStrictEqual, ok } from "assert";

const DIM = `\u001b[2m`;
const RESET = `\u001b[0m`;

// Human-readable name for each TokenType, used in diagnostic/assertion output.
export const tokenTypeNames: { [x in TokenType]: string } = {
  [TokenType.none]: "none",
  [TokenType.comment]: "comment",
  [TokenType.section]: "section",
  [TokenType.directiveKey]: "key",
  [TokenType.directiveValue]: "value",
  [TokenType.assignment]: "assignment",
  [TokenType.unknown]: "unknown",
};

/**
 * Render a token as a fixed-width, single-line summary for diagnostics,
 * e.g. `Token { section;   L1,1 ~ L1,7;    text="[Host]"; }`.
 * Returns the literal string "Undefined" when no token is given.
 */
export function dumpToken(token?: Token) {
  if (!token) return `Undefined`;
  const [from, to] = token.range;
  // assumes range locations are tuples whose [1]/[2] are 0-based
  // line/column — TODO confirm against types.js
  const head = `Token { ${tokenTypeNames[token.type]}; `.padEnd(20);
  const where = `L${from[1] + 1},${from[2] + 1} ~ L${to[1] + 1},${to[2] + 1}; `.padEnd(16);
  return `${head}${where}text=${JSON.stringify(token.text)}; }`;
}

export class LocationUtils {
constructor(private readonly text: string) {}
private last?: LocationTuple;
Expand Down Expand Up @@ -35,6 +56,39 @@ export class LocationUtils {
}
}

/**
 * Fluent assertion helper for a tokenizer result: each method consumes the
 * next token in order and asserts its type (and, usually, its text).
 */
export class AssertTokens {
  // Index of the next token to validate; advances on every call.
  private index = 0;
  constructor(private readonly tokens: Token[]) {}
  /**
   * Consume one token and assert its type and (when given) its text.
   * @param type expected token type
   * @param text expected token text; skipped when undefined (see `comment()`)
   */
  private validate(type: TokenType, text?: string) {
    const i = this.index++;
    const token = this.tokens[i];

    const msg = `token[${i}] should be a ${tokenTypeNames[type]} node "${text}", actual: ${dumpToken(token)}`;
    ok(token, msg);
    // Pass `msg` through so a mismatch reports which token failed and why,
    // instead of node's generic deepStrictEqual message.
    deepStrictEqual(token.type, type, msg);
    if (typeof text !== "undefined") deepStrictEqual(token.text, text, msg);
    return this;
  }
  section(section: string) {
    return this.validate(TokenType.section, section);
  }
  unknown(text: string) {
    return this.validate(TokenType.unknown, text);
  }
  key(key: string) {
    return this.validate(TokenType.directiveKey, key);
  }
  assignment() {
    return this.validate(TokenType.assignment, "=");
  }
  value(text: string) {
    return this.validate(TokenType.directiveValue, text);
  }
  /** Text check is optional: `comment()` asserts only the token type. */
  comment(comment?: string) {
    return this.validate(TokenType.comment, comment);
  }
}

/** Signature of the `test()` helper: takes a conf string and a callback receiving the test context. */
export type TestFn = (conf: string, fn: (ctx: TestFnContext) => void) => void;
export type TestFnContext = {
conf: string;
Expand Down
Loading

0 comments on commit 2a6fc8c

Please sign in to comment.