diff --git a/interfaces/interpolate.d.ts b/interfaces/interpolate.d.ts
index 22ae5fb..771a399 100644
--- a/interfaces/interpolate.d.ts
+++ b/interfaces/interpolate.d.ts
@@ -2,27 +2,26 @@ declare namespace Expand {
type Key = '$' | '=' | '^';
type Open = '{' | '(';
type Terminal = '}' | ')' | ' ' | '__null__';
- type Op = 'v' | 's' | 'e' | 'f';
+ type Op = 'v' | 's' | 'e' | 'f' | 'n';
interface State {
detecting?: Key
header?: Key
op?: Op
- terminal?: Terminal;
+ terminal?: Terminal
dirty?: boolean
escape?: boolean
- sourceMap: number[]
+ escaped?: string
+ sourceMap?: number[]
}
- interface Elem {
- state: State
- raw: any[]
+ interface Elem extends State {
+ out: any[]
source: any[]
- subst: any[]
}
export interface Options {
- dereferenceSync?: (sub: string, sourceMap?: number[]) => any
+ dereferenceSync?: (sub: string, sourceMap?: number[]) => any
dereference?: (sub: string, sourceMap?: number[]) => any
call?: (sub: any, sourceMap?: number[]) => any
fetch?: (sub: any, sourceMap?: number[]) => any
diff --git a/interfaces/moss.d.ts b/interfaces/moss.d.ts
index fe228a8..04e8dd8 100644
--- a/interfaces/moss.d.ts
+++ b/interfaces/moss.d.ts
@@ -1,4 +1,5 @@
///
+///
interface MossError {
name: 'MossError',
@@ -20,6 +21,7 @@ declare namespace Moss {
auto?: any
stack?: any
selectors?: any
+ schema?: any
merge?: {
operator: Merge.Operator,
precedence: { [x: string]: number }
diff --git a/interfaces/schema.d.ts b/interfaces/schema.d.ts
new file mode 100644
index 0000000..d8abbfb
--- /dev/null
+++ b/interfaces/schema.d.ts
@@ -0,0 +1,18 @@
+declare namespace Moss {
+ namespace Schema {
+ // interface Options {
+ // scalarType?: string
+ // singleType?: Schema.Options
+ // multiType?: { [x: string]: Schema.Options } | Schema.Options[]
+ // isArray?: boolean
+ // isMap?: boolean
+ // }
+ interface Options {
+ type: string,
+ properties?: {[x: string]: Options},
+ items?: Options[],
+ $id?: string
+ }
+ type Description = Options | [Options] | string
+ }
+}
\ No newline at end of file
diff --git a/src/async.ts b/src/async.ts
index 0dfe54f..a69f0a9 100644
--- a/src/async.ts
+++ b/src/async.ts
@@ -1,7 +1,7 @@
///
-import { merge, mergeArray, mergeObject, amap as map, aokmap as okmap, arrayify, extend, check, clone, each, union, difference, sum, valueForKeyPath, all, isEqual, unflatten, flatObject, unsetKeyPath, setValueForKeyPath, mergeOrReturnAssignment } from 'typed-json-transform';
-import { interpolateAsync as __interpolate } from './interpolate';
+import { merge, mergeArray, mergeObject, okmap as okmapSync, amap as map, aokmap as okmap, arrayify, extend, check, clone, each, union, difference, sum, valueForKeyPath, all, isEqual, unflatten, flatObject, unsetKeyPath, setValueForKeyPath, mergeOrReturnAssignment, contains } from 'typed-json-transform';
+import { interpolateAsync as __interpolate, reservedKeys } from './interpolate';
import { cascadeAsync as _cascade, shouldConstruct, select, parseSelectors } from './cascade';
import * as yaml from 'js-yaml';
@@ -17,6 +17,7 @@ import {
import { handleError } from './util';
import { Sync } from './sync';
+import { parseDescription } from './schema';
export namespace Async {
type Functions = Moss.Async.Functions;
@@ -35,7 +36,9 @@ export namespace Async {
const target = state.target || current.data;
let res;
- for (const _key of Object.keys(source)) {
+ const keys = Object.keys(source);
+ console.log('evaluate keys', keys);
+ for (const _key of keys) {
if (!_key) {
continue;
}
@@ -70,14 +73,11 @@ export namespace Async {
} else {
let val = source[_key];
if (_key[0] === '$') {
- key = (await interpolate(current, _key)).data;
- } else if (_key[0] == '\\') {
- key = key.slice(1);
- } else if (_key.indexOf('.') != -1) {
- const [first, ...kp] = _key.split('.')
- key = first;
- val = {};
- setValueForKeyPath(source[_key], kp.join('.'), val);
+ if (!contains(reservedKeys, _key)) {
+ key = (await interpolate(current, _key)).data;
+ } else {
+ key = _key;
+ }
} else {
key = _key;
}
@@ -87,10 +87,14 @@ export namespace Async {
if (key) {
state.auto[key] = res;
state.autoMap[key] = currentErrorPath(state).path.join('.');
+
target[key] = res;
}
currentErrorPath(state).path.pop();
}
+ if (current.state.schema) {
+ // check
+ }
return current;
}
@@ -209,6 +213,14 @@ export namespace Async {
$: async (current: Moss.ReturnValue, args: any) => {
await parseNextStructure(current, args);
},
+ schema: async (current: Moss.ReturnValue, args: any) => {
+ const description = await continueWithNewFrame(current, args);
+ current.state.schema = parseDescription(description.data);
+ current.data = current.state.schema;
+ },
+ validate: async (current: Moss.ReturnValue, args: any) => {
+ const schema = await continueWithNewFrame(current, args);
+ },
extend: async (parent: Moss.ReturnValue, args: any) => {
const layer = await continueWithNewFrame(parent, args);
const { data } = layer;
diff --git a/src/interpolate/async.ts b/src/interpolate/async.ts
index 97664d6..cbc2af3 100644
--- a/src/interpolate/async.ts
+++ b/src/interpolate/async.ts
@@ -1,7 +1,7 @@
import { check } from 'typed-json-transform';
const expression = require('../../compiled/expression');
-import { newState, parse, reduce, append as _append, pop } from './shared';
+import { newState, parse, reduce, append as _append } from './shared';
export async function tokenize(str: string, options: Expand.Options) {
const { dereference, dereferenceSync, call, shell, fetch } = options;
@@ -11,45 +11,42 @@ export async function tokenize(str: string, options: Expand.Options) {
let x = 0;
let y = 0;
- const stack: Expand.Elem[][] = [[{ state: { sourceMap: [0, str.length] }, raw: [], subst: [], source: [] }]];
- let ptr = stack[x][y];
+ const stack: Expand.Elem[] = [newState()];
+ let frame = stack[y];
-
- const append = (char: string) => {
- let nextChunk = false;
- if (ptr.state.op) {
- nextChunk = _append(ptr.subst, char)
- } else {
- nextChunk = _append(ptr.raw, char)
+ const append = (val: any) => {
+ if (frame.escape) {
+ frame.escaped += val;
+ return;
}
+ let nextChunk = false;
+ nextChunk = _append(frame.out, val)
if (nextChunk) {
- ptr.source.push(char);
- } else {
- ptr.source[(ptr.source.length - 1) || 0] += char;
- }
- }
-
- const stepBack = () => {
- if (ptr.state.op) {
- pop(ptr.subst);
+ frame.source.push(val);
} else {
- pop(ptr.raw);
+ frame.source[(frame.source.length - 1) || 0] += val;
}
}
- const open = (op: Expand.Op, terminal: Expand.Terminal) => {
- stepBack();
- ptr.state.header = null;
- ptr.state.detecting = null;
- const existing = ptr.state.op;
- if (existing) {
- y++;
- stack[x][y] = newState();
- ptr = stack[x][y];
- ptr.raw = [];
+ const open = (char: string, op: Expand.Op, terminal: Expand.Terminal) => {
+ const { escape, escaped } = frame;
+ if (escape) {
+ frame.escape = null;
+ frame.escaped = '';
+ const directive = escaped.slice(1);
+ if (!directive){
+ throw new Error('explicit interpolate without 2 char prefix directive');
+ }
+ append(directive);
+ if (char) {
+ append(char);
+ }
}
- ptr.state.op = op;
- ptr.state.terminal = terminal;
+ frame.header = null;
+ frame.detecting = null;
+ y++;
+ stack[y] = newState({ op: escape ? 'n' : op, terminal });
+ frame = stack[y];
}
const sub = async (fn: (s: string, location: any) => any, str: string, sourceMap?: number[]) => {
@@ -85,106 +82,130 @@ export async function tokenize(str: string, options: Expand.Options) {
}
const close = async () => {
- const op = ptr.state.op;
- ptr.state.sourceMap = [offset, i + (ptr.state.terminal && ptr.state.terminal.length) - offset];
- ptr.state.op = null;
- ptr.state.terminal = null;
+ const { op, terminal, escape, escaped } = frame;
+ frame.sourceMap = [offset, i + (terminal && terminal.length) - offset];
+ frame.op = null;
+ frame.terminal = null;
+ if (escape && escaped) {
+ frame.escape = false;
+ frame.escaped = '';
+ append(escaped);
+ }
+ const swap = reduce(frame.out, frame.source);
+ let out: any;
let res;
- const swap = reduce(ptr.subst, ptr.source);
if (check(swap, [Object, Array])) {
- res = await call(swap);
+ out = await call(swap);
} else {
if (op == 'v') {
- res = await sub(dereference, swap, ptr.state.sourceMap);
+ out = await sub(dereference, swap, frame.sourceMap);
} else if (op == 's') {
- res = await sub(shell, swap, ptr.state.sourceMap);
+ out = await sub(shell, swap, frame.sourceMap);
} else if (op == 'f') {
- res = await sub(fetch, swap, ptr.state.sourceMap);
+ out = await sub(fetch, swap, frame.sourceMap);
} else if (op == 'e') {
- const deref = (str: string) => subSync(dereferenceSync, str, ptr.state.sourceMap)
- res = await sub((s) => expression(deref, check).parse(s), swap, ptr.state.sourceMap)
+ const deref = (str: string) => subSync(dereferenceSync, str, frame.sourceMap)
+ out = await sub((s) => expression(deref, check).parse(s), swap, frame.sourceMap)
+ } else if (op == 'n') {
+ if (terminal != '__null__') append(terminal);
+ out = reduce(frame.out, frame.source);
}
}
- if (y > 0) {
- delete stack[x][y];
- y--;
- ptr = stack[x][y];
- ptr.subst.push(res);
- }
- else {
- if (res) { ptr.state.dirty = true };
- ptr.raw.push(res);
- x++;
- y = 0;
- stack[x] = [newState()];
- ptr = stack[x][y];
- }
+ // if (out) frame.dirty = true;
+ // const { out } = frame;
+ // const out = reduce(frame.out, frame.source);
+ // delete stack[y];
+ stack.length--
+ y--;
+ frame = stack[y];
+ append(out);
+ // frame.source.push(out);
}
for (i = 0; i != template.length; i++) {
const char = template[i];
- if (ptr.state.escape) {
- ptr.state.escape = false;
- append(char);
- } else {
- const { detecting, header, op, terminal } = ptr.state;
- switch (char) {
- case '(':
- if (detecting && (detecting == '$')) {
- open('s', ')');
- break;
- } else {
- append(char);
- }
- break;
- case '{':
- if (detecting) {
- open(detecting == '$' ? 'v' : detecting == '^' ? 'f' : 'e', '}');
- break;
- } else {
- append(char);
- }
- break;
- case '}': case ')':
- if (op && terminal == char) {
- await close();
- } else {
- append(char);
- }
+ const { detecting, header, op, terminal, escape, escaped } = frame;
+ switch (char) {
+ case '(':
+ if (detecting && (detecting == '$')) {
+ open(char, 's', ')');
break;
- case ' ':
- if (op && terminal == char) {
- await close();
- }
+ } else {
append(char);
+ }
+ break;
+ case '{':
+ if (detecting) {
+ open(char, detecting == '$' ? 'v' : detecting == '^' ? 'f' : 'e', '}');
break;
- case '\\':
- ptr.state.escape = true;
- break;
- default:
- if (header) {
- ptr.state.header = null;
- if (header == '=') open('e', '__null__');
- else if (header == '^') open('f', '__null__');
- else if (header == '$') open('v', ' ');
- } else if (char == '=' || char == '$' || char == '^') {
- if (i < 1) ptr.state.header = char;
- ptr.state.detecting = char;
- }
+ } else {
+ append(char);
+ }
+ break;
+ case '}': case ')':
+ if (op && terminal == char) {
+ await close();
+ } else {
+ append(char);
+ }
+ break;
+ case ' ':
+ if (op && terminal == char) {
+ await close();
+ }
+ append(char);
+ break;
+ case '\\':
+ if (frame.escape) {
+ append(char);
+ } else {
+ frame.escape = true;
+ frame.escaped = '\\';
+ }
+ break;
+ case '=': case '$': case '^':
+ if (detecting) {
+ append(detecting);
+ }
+ if (!frame.out[0] || (frame.out[0].length < 2)) {
+ frame.header = char;
+ }
+ if (frame.escape) {
+ append(char);
+ }
+ frame.detecting = char;
+ break;
+ default:
+ if (header) {
+ frame.header = null;
+ if (header == '=') open(null, 'e', '__null__');
+ else if (header == '^') open(null, 'f', '__null__');
+ else if (header == '$') open(null, 'v', '__null__');
+ } else {
if (detecting) {
- ptr.state.detecting = null;
+ frame.detecting = null;
+ append(detecting);
}
- append(char);
- break;
- }
+ if (escape) {
+ frame.escape = false;
+ frame.escaped = null;
+ // console.log('append escaped', escaped)
+ append(escaped);
+ }
+ }
+ append(char);
+ break;
}
}
- while (ptr.state.op) {
- if (ptr.state.terminal == '}') throw { message: `expected a closing ${ptr.state.terminal}` }
+ if (frame.detecting) {
+ append(frame.detecting);
+ }
+ while (frame.op) {
+ if (frame.terminal == '}') throw { message: `expected a closing ${frame.terminal}` }
await close();
}
- if (ptr.state.detecting) {
- ptr.state.detecting = null;
+ if (frame.detecting) {
+ frame.detecting = null;
}
return stack;
};
diff --git a/src/interpolate/index.ts b/src/interpolate/index.ts
index b1be6d6..b7697f0 100644
--- a/src/interpolate/index.ts
+++ b/src/interpolate/index.ts
@@ -1,4 +1,8 @@
import { interpolate as interpolateAsync } from './async'
import { interpolate } from './sync';
-export { interpolate, interpolateAsync };
\ No newline at end of file
+export { interpolate, interpolateAsync };
+
+const jsonSchemaKeys = ['id', 'schema', 'ref', 'comment'];
+const mongoKeys = ['set', 'unset', 'push', 'pull', 'gt', 'lt', 'gte', 'lte', 'exists'];
+export const reservedKeys = jsonSchemaKeys.concat(mongoKeys);
diff --git a/src/interpolate/shared.ts b/src/interpolate/shared.ts
index 399b545..ed54194 100644
--- a/src/interpolate/shared.ts
+++ b/src/interpolate/shared.ts
@@ -110,9 +110,11 @@ export const pop = (stack: any[]) => {
const lastIndex = (stack.length - 1) || 0;
if (check(stack[lastIndex], String)) {
const s = stack[lastIndex];
+ const l = s.slice(-1);
stack[lastIndex] = s.slice(0, s.length - 1);
+ return l;
} else {
- stack.pop();
+ return stack.pop();
}
}
@@ -127,19 +129,14 @@ export const reduce = (raw: any[], source: any[]) => {
return res;
}
-export function newState(): Expand.Elem {
- return { state: { sourceMap: [] }, raw: [], subst: [], source: [] };
+export function newState(options?: Partial): Expand.Elem {
+ return { sourceMap: [], out: [], source: [], ...options };
}
-export function parse(tokens: Expand.Elem[][]) {
+export function parse(tokens: Expand.Elem[]) {
let out = '';
let outSource = '';
let changed = false;
- for (const e of tokens) {
- const flat = reduce(e[0].raw, e[0].source);
- out = join(out, flat, outSource, e[0].source[0]);
- outSource = e[0].source.join('');
- if (e[0].state.dirty) changed = true;
- }
+ out = reduce(tokens[0].out, tokens[0].source);
return { value: out, changed: changed };
}
\ No newline at end of file
diff --git a/src/interpolate/sync.ts b/src/interpolate/sync.ts
index 5c6aed1..47420a2 100644
--- a/src/interpolate/sync.ts
+++ b/src/interpolate/sync.ts
@@ -1,55 +1,52 @@
import { check } from 'typed-json-transform';
const expression = require('../../compiled/expression');
-import { newState, parse, reduce, append as _append, pop } from './shared';
+import { newState, parse, reduce, append as _append } from './shared';
export function tokenize(str: string, options: Expand.Options) {
- const { dereference, call, shell, fetch } = options;
+ const { dereference, dereferenceSync, call, shell, fetch } = options;
const template = String(str);
let i = 0;
let offset = i;
let x = 0;
let y = 0;
- const stack: Expand.Elem[][] = [[{ state: { sourceMap: [0, str.length] }, raw: [], subst: [], source: [] }]];
- let ptr = stack[x][y];
+ const stack: Expand.Elem[] = [newState()];
+ let frame = stack[y];
-
- const append = (char: string) => {
- let nextChunk = false;
- if (ptr.state.op) {
- nextChunk = _append(ptr.subst, char)
- } else {
- nextChunk = _append(ptr.raw, char)
+ const append = (val: any) => {
+ if (frame.escape) {
+ frame.escaped += val;
+ return;
}
+ let nextChunk = false;
+ nextChunk = _append(frame.out, val)
if (nextChunk) {
- ptr.source.push(char);
+ frame.source.push(val);
} else {
- ptr.source[(ptr.source.length - 1) || 0] += char;
+ frame.source[(frame.source.length - 1) || 0] += val;
}
}
- const stepBack = () => {
- if (ptr.state.op) {
- pop(ptr.subst);
- } else {
- pop(ptr.raw);
- }
- }
-
- const open = (op: Expand.Op, terminal: Expand.Terminal) => {
- stepBack();
- ptr.state.header = null;
- ptr.state.detecting = null;
- const existing = ptr.state.op;
- if (existing) {
- y++;
- stack[x][y] = newState();
- ptr = stack[x][y];
- ptr.raw = [];
+ const open = (char: string, op: Expand.Op, terminal: Expand.Terminal) => {
+ const { escape, escaped } = frame;
+ if (escape) {
+ frame.escape = null;
+ frame.escaped = '';
+ const directive = escaped.slice(1);
+ if (!directive){
+ throw new Error('explicit interpolate without 2 char prefix directive');
+ }
+ append(directive);
+ if (char) {
+ append(char);
+ }
}
- ptr.state.op = op;
- ptr.state.terminal = terminal;
+ frame.header = null;
+ frame.detecting = null;
+ y++;
+ stack[y] = newState({ op: escape ? 'n' : op, terminal });
+ frame = stack[y];
}
const sub = (fn: (s: string, location: any) => any, str: string, sourceMap?: number[]) => {
@@ -85,106 +82,130 @@ export function tokenize(str: string, options: Expand.Options) {
}
const close = () => {
- const op = ptr.state.op;
- ptr.state.sourceMap = [offset, i + (ptr.state.terminal && ptr.state.terminal.length) - offset];
- ptr.state.op = null;
- ptr.state.terminal = null;
+ const { op, terminal, escape, escaped } = frame;
+ frame.sourceMap = [offset, i + (terminal && terminal.length) - offset];
+ frame.op = null;
+ frame.terminal = null;
+ if (escape && escaped) {
+ frame.escape = false;
+ frame.escaped = '';
+ append(escaped);
+ }
+ const swap = reduce(frame.out, frame.source);
+ let out: any;
let res;
- const swap = reduce(ptr.subst, ptr.source);
if (check(swap, [Object, Array])) {
- res = call(swap);
+ out = call(swap);
} else {
if (op == 'v') {
- res = sub(dereference, swap, ptr.state.sourceMap);
+ out = sub(dereference, swap, frame.sourceMap);
} else if (op == 's') {
- res = sub(shell, swap, ptr.state.sourceMap);
+ out = sub(shell, swap, frame.sourceMap);
} else if (op == 'f') {
- res = sub(fetch, swap, ptr.state.sourceMap);
+ out = sub(fetch, swap, frame.sourceMap);
} else if (op == 'e') {
- const deref = (str: string) => subSync(dereference, str, ptr.state.sourceMap)
- res = sub((s) => expression(deref, check).parse(s), swap, ptr.state.sourceMap)
+ const deref = (str: string) => subSync(dereferenceSync, str, frame.sourceMap)
+ out = sub((s) => expression(deref, check).parse(s), swap, frame.sourceMap)
+ } else if (op == 'n') {
+ if (terminal != '__null__') append(terminal);
+ out = reduce(frame.out, frame.source);
}
}
- if (y > 0) {
- delete stack[x][y];
- y--;
- ptr = stack[x][y];
- ptr.subst.push(res);
- }
- else {
- if (res) { ptr.state.dirty = true };
- ptr.raw.push(res);
- x++;
- y = 0;
- stack[x] = [newState()];
- ptr = stack[x][y];
- }
+ // if (out) frame.dirty = true;
+ // const { out } = frame;
+ // const out = reduce(frame.out, frame.source);
+ // delete stack[y];
+ stack.length--
+ y--;
+ frame = stack[y];
+ append(out);
+ // frame.source.push(out);
}
for (i = 0; i != template.length; i++) {
const char = template[i];
- if (ptr.state.escape) {
- ptr.state.escape = false;
- append(char);
- } else {
- const { detecting, header, op, terminal } = ptr.state;
- switch (char) {
- case '(':
- if (detecting) {
- open('s', ')');
- break;
- } else {
- append(char);
- }
+ const { detecting, header, op, terminal, escape, escaped } = frame;
+ switch (char) {
+ case '(':
+ if (detecting && (detecting == '$')) {
+ open(char, 's', ')');
break;
- case '{':
- if (detecting) {
- open(detecting == '$' ? 'v' : detecting == '^' ? 'f' : 'e', '}');
- break;
- } else {
- append(char);
- }
- break;
- case '}': case ')':
- if (op && terminal == char) {
- close();
- } else {
- append(char);
- }
- break;
- case ' ':
- if (op && terminal == char) {
- close();
- }
+ } else {
append(char);
+ }
+ break;
+ case '{':
+ if (detecting) {
+ open(char, detecting == '$' ? 'v' : detecting == '^' ? 'f' : 'e', '}');
break;
- case '\\':
- ptr.state.escape = true;
- break;
- default:
- if (header) {
- ptr.state.header = null;
- if (header == '=') open('e', '__null__');
- else if (header == '^') open('f', '__null__');
- else if (header == '$') open('v', ' ');
- } else if (char == '=' || char == '$' || char == '^') {
- if (i < 1) ptr.state.header = char;
- ptr.state.detecting = char;
- }
+ } else {
+ append(char);
+ }
+ break;
+ case '}': case ')':
+ if (op && terminal == char) {
+ close();
+ } else {
+ append(char);
+ }
+ break;
+ case ' ':
+ if (op && terminal == char) {
+ close();
+ }
+ append(char);
+ break;
+ case '\\':
+ if (frame.escape) {
+ append(char);
+ } else {
+ frame.escape = true;
+ frame.escaped = '\\';
+ }
+ break;
+ case '=': case '$': case '^':
+ if (detecting) {
+ append(detecting);
+ }
+ if (!frame.out[0] || (frame.out[0].length < 2)) {
+ frame.header = char;
+ }
+ if (frame.escape) {
+ append(char);
+ }
+ frame.detecting = char;
+ break;
+ default:
+ if (header) {
+ frame.header = null;
+ if (header == '=') open(null, 'e', '__null__');
+ else if (header == '^') open(null, 'f', '__null__');
+ else if (header == '$') open(null, 'v', '__null__');
+ } else {
if (detecting) {
- ptr.state.detecting = null;
+ frame.detecting = null;
+ append(detecting);
}
- append(char);
- break;
- }
+ if (escape) {
+ frame.escape = false;
+ frame.escaped = null;
+ // console.log('append escaped', escaped)
+ append(escaped);
+ }
+ }
+ append(char);
+ break;
}
}
- while (ptr.state.op) {
- if (ptr.state.terminal == '}') throw { message: `expected a closing ${ptr.state.terminal}` }
+ if (frame.detecting) {
+ append(frame.detecting);
+ }
+ while (frame.op) {
+ if (frame.terminal == '}') throw { message: `expected a closing ${frame.terminal}` }
close();
}
- if (ptr.state.detecting) {
- ptr.state.detecting = null;
+ if (frame.detecting) {
+ frame.detecting = null;
}
return stack;
};
diff --git a/src/interpolate/expression.pegjs b/src/parsers/expression.pegjs
similarity index 100%
rename from src/interpolate/expression.pegjs
rename to src/parsers/expression.pegjs
diff --git a/src/parsers/moss.ne b/src/parsers/moss.ne
new file mode 100644
index 0000000..a12cf4e
--- /dev/null
+++ b/src/parsers/moss.ne
@@ -0,0 +1,884 @@
+@lexer lexer
+
+start
+ -> sof rootScope eof {% ([sof, scope]) => scope %}
+
+rootScope
+ -> map {% id %}
+ | (sol eol "string") multilineString ("\/string") {% ([sol, scope]) => scope %}
+
+scope
+ -> map {% ([layer]) => layer.data %}
+
+map
+ -> map mapPairConstructor
+ {% ([_layer, nextMatch]) => {
+ const layer = {
+ data: new Map(_layer.data),
+ context: {}
+ }
+ if (nextMatch && (nextMatch[0] !== undefined)) {
+ addPairToMap(nextMatch, layer.data)
+ }
+ return layer;
+ } %}
+ | map mapList {% ([_layer, list]) => {
+ const layer = {
+ data: new Map(_layer.data),
+ context: {}
+ }
+ if (list && list.length) {
+ for (let i = 0; i < list.length; i++){
+ addPairToMap([i, list[i]], layer.data)
+ }
+ }
+ return layer;
+ } %}
+ | mapPairConstructor
+ {% ([initialMatch]) => {
+ const layer = {
+ data: new Map(),
+ context: {}
+ }
+ if (initialMatch && (initialMatch[0] !== undefined)) {
+ addPairToMap(initialMatch, layer.data)
+ }
+ return layer;
+ } %}
+ | mapList
+ {% ([list]) => {
+ const layer = {
+ data: new Map(),
+ context: {}
+ }
+ if (list && list.length) {
+ for (let i = 0; i < list.length; i++){
+ addPairToMap([i, list[i]], layer.data)
+ }
+ }
+ return layer;
+ } %}
+
+mapList
+ -> (sol "-<" endLine) list "\/-<" {% ([prefix, list]) => list %}
+
+mapPairConstructor
+ # nested explicitly declared list
+ -> key ((space constraintMap) | space) ("-<" pushScope) list "\/-<" popScope
+ {% ([key, context, mode, scope]) => {
+ if (context){
+ return [key, scope, {multiLineString: true, ...context[1]}]
+ } else {
+ return [key, scope, {multiLineString: true}]
+ }
+ } %}
+
+ # multiline string
+ | key ((space constraintMap) | space) (eol "text" indent) multilineString popScope "\/text"
+ {% ([key, context, mode, scope]) => {
+ if (context){
+ return [key, scope, {multiLineString: true, ...context[1]}]
+ } else {
+ return [key, scope, {multiLineString: true}]
+ }
+ } %}
+
+ # nested map
+ | key pushTypedScope scope popScope
+ {% ([key, context, scope]) => {
+ return [key, scope]
+ } %}
+
+ # explicit map pair, rhs is a map
+ | key ((space constraintMap) | space) "{" scope "}" endLine
+ {% ([key, context, bracket, scope]) => {
+ return [key, scope]
+ } %}
+
+ # default map pair, rhs is a statement
+ | key ((space constraintMap) | space) statement mapTerminator
+ {% ([key, context, statement]) => {
+ console.log('pair', [key, statement])
+ return [key, statement]
+ } %}
+
+ # default simple value
+ | (sol | space) (constraintMap):? statement mapTerminator
+ {% ([prefix, constraintMap, statement]) => {
+ return [statement, true]
+ }%}
+
+ | sol eol {% () => null %}
+ | sol comment {% () => null %}
+ # error cases
+ | literal pushScope scope
+ {% expectedScopeOperator %}
+
+mapTerminator
+ -> (" " | "," | endLine) {% id %}
+
+listTerminator
+ -> ("," | endLine) {% id %}
+
+
+list
+ -> list listConstructor
+ {% ([array, item]) => {
+ if (item){
+ return [...array, item];
+ }
+ return array;
+ } %}
+ | listConstructor
+ {% ([item]) => {
+ return [ item ];
+ } %}
+
+listConstructor
+ # nested constrained scope
+ -> key pushTypedScope scope popScope
+ {% ([key, context, scope]) => {
+ return scope
+ } %}
+
+ | key ((space constraintMap) | space) "{" scope "}" endLine
+ {% ([key, context, bracket, scope]) => {
+ return scope
+ } %}
+
+ # default map pair, rhs is a statement
+ | key ((space constraintMap) | space) statement listTerminator
+ {% ([key, context, statement]) => {
+ return statement
+ } %}
+
+ # default simple value
+ | (sol | space) (constraintMap):? statement listTerminator
+ {% ([prefix, constraintMap, statement]) => {
+ return statement
+ }%}
+
+ | sol eol {% () => null %}
+ | sol comment {% () => null %}
+
+multilineString
+ -> stringLine stringLine:* {% ([head, tail]) => {
+ const [startIndent, mls] = head;
+ if (tail.length){
+ const res = tail.map(([indent, line]) => {
+ let margin = '';
+ if (indent > startIndent){
+ for (let i = 0; i < indent - startIndent; i++){
+ margin = margin + ' ';
+ }
+ }
+ if (line){
+ return margin + line;
+ }
+ return margin;
+ });
+ return [mls, ...res].join('\n');
+ }
+ return mls;
+ } %}
+
+stringLine
+ -> indent multilineString dedent
+ {% ([indent, mls]) => {
+ return [indent.indent, mls];
+ } %}
+ | sol _escapedString:? eol
+ {% ([sol, string]) => {
+ return [sol.indent, string];
+ } %}
+
+
+pushTypedScope ->
+ space constraintMap indent
+ {% ([space, constraintMap]) => constraintMap %}
+ | pushScope {% id %}
+
+
+constraintMap
+ -> constraintMap constraint
+ {% ([map, nextMatch]) => {
+ if (nextMatch) {
+ addPairToMap(nextMatch, map);
+ }
+ return map;
+ } %}
+ | constraint
+ {% ([initialMatch]) => {
+ const map = new Map();
+ if (initialMatch) {
+ addPairToMap(initialMatch, map);
+ }
+ return map;
+ } %}
+
+constraint
+ -> "@" "{" nestedScope sol "}" (space | endLine)
+ {% ([directive, bracket, scope]) => scope %}
+ | "@" literal "{" scope "}" (space | endLine)
+ {% ([directive, literal, bracket, scope]) => [literal, scope] %}
+ | "@" literal (space | endLine) {% ([directive, property]) => {
+ return [property, true]
+ }%}
+
+# Map
+key
+ -> (sol | space) keyExpression ":" {% ([pre, key]) => key %}
+
+keyExpression
+ -> ( "=" | "+" | "|" | "&" | "^" | "-" ) space statement {% reduce %}
+ | concat {% id %}
+
+# statement
+statement
+ -> concat {% id %}
+
+# Operators
+
+concat
+ -> concat space boolean {% reduce %}
+ | boolean {% id %}
+
+boolean
+ -> boolean space ( "n" | "|" ) space add {% reduce %}
+ | add {% id %}
+
+add
+ -> add space ( "+"|"-" ) space multiply {% reduce %}
+ | multiply {% id %}
+
+multiply
+ -> multiply space ("*"|"/") space unaryPrefix {% reduce %}
+ | unaryPrefix {% id %}
+
+unaryPrefix
+ -> "+" group {% reduce %}
+ | "-" group {% reduce %}
+ | "!" group {% reduce %}
+ | group {% id %}
+
+group
+ -> "(" concat ")" {% reduce %}
+ | literal {% id %}
+
+# Operators
+directive
+ -> "@" {% () => '@' %}
+
+# Formatting
+nestedScope
+ -> pushScope scope popScope {% ([push, scope]) => scope %}
+
+pushScope
+ -> (inlineComment | eol) indent {% id %}
+
+popScope
+ -> dedent {% id %}
+
+endLine
+ -> inlineComment {% id %}
+ | eol {% id %}
+
+inlineComment
+ -> space comment {% id %}
+
+comment
+ -> "/" "/" _escapedString:? %eol {% ([operator, comment]) => (comment) %}
+
+# Numbers
+
+number
+ -> _number {% ([n]) => parseFloat(n) %}
+
+_number
+ -> _float "e" digit {% reduce %}
+ | _float {% id %}
+
+_float
+ -> digit "." digit {% reduce %}
+ | digit {% id %}
+
+digit
+ -> digit [0-9] {% concat %}
+ | [0-9] {% ([tok]) => tok %}
+
+# Words
+
+literal
+ -> string {% id %}
+ | singleWord {% id %}
+ | uri {% id %}
+ | number {% id %}
+
+# URL = scheme:[//authority]path[?query][#fragment]
+uri
+ -> url {% id %}
+ | authority {% id %}
+
+url
+ -> urlDomainScheme authority {% reduce %}
+ | urlScheme uriPathComponent {% reduce %}
+ | urlScheme urlPath {% reduce %}
+
+urlDomainScheme
+ -> urlScheme "/" "/" {% reduce %}
+
+urlSchemes
+ -> urlSchemes urlScheme {% reduce %}
+ | urlScheme {% id %}
+
+urlScheme
+ -> domainComponent ":" {% reduce %}
+
+authority
+ -> urlCredentials "@" _authority {% reduce %}
+ | _authority {% reduce %}
+
+_authority
+ -> uriDomainComponent uriPathComponent:? uriQueries:? uriFragment:? {% reduce %}
+
+uriQueries
+ -> uriQueries uriQuery {% reduce %}
+ | uriQuery {% id %}
+
+uriPathComponent
+ -> "/" urlPath {% reduce %}
+ | "/" {% ([tok]) => tok.value %}
+
+urlCredentials
+ -> urlCredentials ":" password {% reduce %}
+ | email {% id %}
+ | subdomain {% id %}
+
+urlPath
+ -> urlPath "/" urlPathName {% reduce %}
+ | urlPath "/" {% reduce %}
+ | urlPathName {% id %}
+
+urlPathName ->
+ urlPathName "." urlPathWord {% reduce %}
+ | urlPathWord {% id %}
+
+urlPathWord
+ -> urlPathWord urlPathChar {% reduce %}
+ | urlPathChar {% id %}
+
+urlPathChar
+ -> [^ ^/^.^?^;] {% ([tok]) => tok.value %}
+
+filePath ->
+ filePath "/" fileName {% reduce %}
+ | fileName {% id %}
+
+fileName ->
+ fileName "." fileWord {% reduce %}
+ | fileWord {% id %}
+
+fileWord
+ -> fileWord fileChar {% reduce %}
+ | fileChar {% id %}
+
+fileChar
+ -> [^ ^/^.] {% ([tok]) => tok.value %}
+
+password
+ -> urlSafePlusEncoded {% reduce %}
+
+email
+ -> subdomain "@" domain {% reduce %}
+
+uriDomainComponent
+ -> uriDomainComponent uriPortComponent {% reduce %}
+ | domain {% reduce %}
+ | "[" ipv6 "]" {% reduce %}
+ | ipv4 {% id %}
+
+matchSeven[x]
+ -> $x $x $x $x $x $x $x {% reduce %}
+
+matchOneToSeven[x]
+ -> $x $x $x $x $x $x $x {% reduce %}
+ | $x $x $x $x $x $x {% reduce %}
+ | $x $x $x $x $x {% reduce %}
+ | $x $x $x $x {% reduce %}
+ | $x $x $x $x {% reduce %}
+ | $x $x $x {% reduce %}
+ | $x $x {% reduce %}
+ | $x {% reduce %}
+
+ipv6
+ -> matchSeven[ipv6Group] ipv6Number {% reduce %}
+ | matchOneToSeven[ipv6Group] ":" ipv6Number {% reduce %}
+
+matchOneToFour[x]
+ -> $x $x $x $x {% reduce %}
+ | $x $x $x {% reduce %}
+ | $x $x {% reduce %}
+ | $x {% reduce %}
+
+ipv6Group
+ -> ipv6Number ":" {% reduce %}
+
+ipv6Number
+ -> matchOneToFour[hexDigit]
+
+ipv4
+ -> ipv4Group "." ipv4Group "." ipv4Group "." ipv4Group
+
+ipv4Group
+ -> d2 d5 d0_5 {% reduce %}
+ | d2 d0_4 d0_9 {% reduce %}
+ | d1 d0_9 d0_9 {% reduce %}
+ | d0_9 d0_9 {% reduce %}
+ | d0_9 {% id %}
+
+d1 -> "1" {% ([tok]) => tok %}
+d2 -> "2" {% ([tok]) => tok %}
+d5 -> "5" {% ([tok]) => tok %}
+d0_4 -> [0-4] {% ([tok]) => tok %}
+d0_5 -> [0-5] {% ([tok]) => tok %}
+d0_9 -> [0-9] {% ([tok]) => tok %}
+
+domain
+ -> subdomain "." domainComponent {% reduce %}
+
+uriPortComponent
+ -> ":" number {% reduce %}
+
+subdomain ->
+ domainComponent "." subdomain {% reduce %}
+ | domainComponent {% id %}
+
+# ! $ & ' ( ) * + , ; =
+# are permitted by generic URI syntax to be used unencoded
+# in the user information, host, and path as delimiters.
+
+uriQuery
+ -> "?" queryList {% reduce %}
+
+queryList
+ -> queryList "&" queryFragment {% reduce %}
+ | queryFragment {% id %}
+
+queryFragment
+ -> queryFragment "=" urlSafePlusEncoded {% reduce %}
+ | urlSafePlusEncoded {% id %}
+
+uriFragment
+ -> "#" queryList {% reduce %}
+
+domainComponent
+ -> [a-zA-Z] [a-zA-Z0-9\-]:*
+ {% optionalTail %}
+
+singleWord
+ -> [a-zA-Z$_] [a-zA-Z0-9$_]:*
+ {% optionalTail %}
+
+word
+ -> word wordSafeChar {% concat %}
+ | wordStartChar {% id %}
+
+wordSafeChar
+ -> wordStartChar {% id %}
+ | [0-9] {% ([tok]) => tok.value %}
+
+wordStartChar
+ -> [a-zA-Z$_] {% ([tok]) => tok.value %}
+
+string
+ -> "`" _escapedString "`" {% function(d) {return d[1]; } %}
+
+_string
+ -> null {% function() {return ""; } %}
+ | _string _stringchar {% ([lhs, rhs]) => lhs + rhs %}
+
+_stringchar
+ -> [^\\"] {% id %}
+ | "\\" [^] {% concat %}
+
+urlSafePlusEncoded
+ -> urlSafePlusEncoded urlSafePlusEncodedChars {% reduce %}
+ | urlSafePlusEncodedChars {% id %}
+
+urlSafePlusEncodedChars
+ -> "%" hexDigit hexDigit {% reduce %}
+ | "&" "a" "m" "p" ";" {% reduce %}
+ | urlSafeChar {% id %}
+
+hexDigit -> [0-9a-fA-F] {% id %}
+
+urlSafe
+ -> urlSafe urlSafeChar {% concat %}
+ | urlSafeChar {% id %}
+
+urlSafeChar -> [a-zA-Z0-9\-] {% ([tok]) => tok.value %}
+
+
+
+chunk
+ -> chunk chunkChar {% concat %}
+ | chunkChar {% id %}
+
+chunkChar
+ -> [a-zA-Z0-9@+\-*?|/()\\:] {% ([tok]) => tok.value %}
+
+_escapedString
+ -> _escapedString escapedChar {% concat %}
+ | escapedChar {% id %}
+
+escapedChar
+ -> %space {% ([tok]) => tok.value %}
+ | %any {% ([tok]) => tok.value %}
+
+# syntactic whitespace
+sof -> %sof {% ([tok]) => tok.value %}
+eof -> %eof {% ([tok]) => tok.value %}
+sol -> %sol {% ([tok]) => tok %}
+eol -> _ %eol {% ([ws, tok]) => tok %}
+indent
+ -> %indent {% ([tok]) => tok %}
+dedent
+ -> %dedent {% ([tok]) => tok %}
+space -> %space {% ([tok]) => tok.value %}
+
+# ignored whitespace or chars
+_
+ -> _ space {% ([e]) => {
+ return e ? e + ' ': '';
+ } %}
+ | null {% () => '' %}
+
+@{%
+// Lexer
+
+const makeToken = (type, text, sourceMap, indent) => ({...sourceMap, type, text, value: text, indent, toString: () => text});
+
+const makeSol = (sourceMap, indent) => {
+ const t = makeToken('sol', '\n', sourceMap, indent);
+ //console.log(t);
+ return t
+}
+const makeEol = (sourceMap, indent) => makeToken('eol', '\n', sourceMap, indent)
+
+const makeIndent = (sourceMap, indent) => makeToken('indent', 'indent', sourceMap, indent)
+const makeDedent = (sourceMap, indent) => makeToken('dedent', 'dedent', sourceMap, indent)
+
+const makeSof = () => makeToken('sof', 'sof');
+const makeEof = () => makeToken('eof', 'eof');
+
+const doDedent = (ruleMap, indent, nextIndent, sourceMap) => {
+ const tokens = [makeEol(sourceMap, indent)];
+ const ruleToken = ruleMap.get(indent);
+ if (ruleToken) {
+ tokens.push(makeToken('stopRule', `/${ruleToken.text}`, sourceMap, indent));
+ ruleMap.delete(indent)
+ }
+ tokens.push(makeDedent(sourceMap, nextIndent));
+ tokens.push(makeSol(sourceMap, nextIndent));
+ return tokens;
+}
+
+function* indented(lexer, source, info) {
+ let iter = peekable(lexer.reset(source, info))
+ let stack = []
+ let ruleMap = new Map();
+
+ // absorb initial blank lines and indentation
+ let indent = iter.nextIndent()
+
+ yield makeSof();
+  yield makeSol(undefined, indent); // signature is (sourceMap, indent); no source map exists yet at start-of-file
+
+ for (let tok; tok = iter.next(); ) {
+ const sourceMap = {line: tok.line, col: tok.col};
+
+ if (tok.type === 'eol' || tok.type === 'startRule') {
+ const newIndent = iter.nextIndent()
+ if (newIndent == null) {
+ break;
+ }// eof
+ else if (newIndent === indent) {
+ if (tok.type === 'startRule'){
+ const ruleToken = makeToken('startRule', tok.text.slice(0, tok.text.indexOf('<') + 1));
+ ruleMap.set(indent, ruleToken);
+ yield ruleToken;
+ }
+        yield makeEol(sourceMap, indent); // args were swapped; signature is (sourceMap, indent) as at every other call site
+ yield makeSol(sourceMap, indent);
+ } else if (newIndent > indent) {
+ stack.push(indent)
+ indent = newIndent
+ if (tok.type === 'startRule'){
+ const ruleToken = makeToken('startRule', tok.text.slice(0, tok.text.indexOf('<') + 1));
+ ruleMap.set(indent, ruleToken);
+ yield ruleToken;
+ }
+ yield makeEol(sourceMap, indent);
+ yield makeIndent(sourceMap, indent)
+ yield makeSol(sourceMap, indent);
+ } else if (newIndent < indent){
+ while (newIndent < indent) {
+ const nextIndent = stack.pop();
+ const dedentTokens = doDedent(ruleMap, indent, nextIndent, sourceMap);
+ for (const t of dedentTokens){
+ yield t;
+ }
+ indent = nextIndent;
+ }
+ if (newIndent !== indent) {
+ throw new Error(`inconsistent indentation ${newIndent} != ${indent}`)
+ }
+ } else {
+ yield makeEol(sourceMap, indent);
+ yield makeSol(sourceMap, indent);
+ }
+ indent = newIndent
+ } else {
+ yield { ...tok, indent: indent}
+ }
+ }
+
+ // dedent remaining blocks at eof
+ for (let i = stack.length; i--;) {
+ const nextIndent = stack.pop() || 0;
+ const dedentTokens = doDedent(ruleMap, indent, nextIndent, {line: 'eof', col: 'eof'});
+ for (const t of dedentTokens){
+ yield t;
+ }
+ indent = nextIndent;
+ }
+
+ yield makeEol();
+ const ruleToken = ruleMap.get(0);
+ if (ruleToken) {
+ yield makeToken('stopRule', `/${ruleToken.text}`);
+ ruleMap.delete(0)
+ }
+
+ yield makeEof();
+}
+
+function peekable(lexer) {
+ let here = lexer.next()
+ return {
+ next() {
+ const old = here
+ here = lexer.next()
+ return old
+ },
+ peek() {
+ return here
+ },
+ nextIndent() {
+ for (let tok; tok = this.peek(); ) {
+ if (tok.type === 'eol') {
+ this.next();
+ }
+ else if (tok.type === 'space') {
+ const indent = tok.value.length
+ const recur = (indent) => {
+ this.next()
+ const next = this.peek()
+ if (!next) return indent
+ if (next.type === 'eol') {
+ this.next()
+ return indent
+ } else if (next.type === 'space') {
+ return recur(indent + 1);
+ }
+ return indent
+ }
+ return recur(1);
+ }
+ return 0
+ }
+ },
+ }
+}
+
+const rules = {
+ space: /[ ]/,
+ startRule: {
+ match: /[a-zA-Z+\-`]+<[\n\r]|[a-zA-Z+\-`]+< \/\/.*[\n\r]/,
+ lineBreaks: true
+ },
+ eol: {match: /[\n\r]/, lineBreaks: true },
+ any: /[^\s]/
+};
+
+const printToken = (t) => {
+ switch (t.type){
+ case "eol": return "}";
+    // (removed duplicate, unreachable `case "eol"` — identical to the case above)
+ case "space": return " ";
+ case "indent": return "->";
+ case "dedent": return "<-";
+ case "eof": return ">";
+ case "sof": return "<>";
+ case "sol": return "{";
+ default: return t.text;
+ }
+}
+
+function StreamLexer() {
+ this.lexer = moo.compile(rules);
+}
+
+StreamLexer.prototype.next = function() {
+ const tok = this.generator.next().value;
+ if (tok){
+ //console.log(printToken(tok), tok);
+ return tok;
+ }
+}
+
+StreamLexer.prototype.save = function() {
+}
+
+StreamLexer.prototype.getTokenTypes = function(source) {
+ const types = [];
+ const iter = indented( moo.compile(rules), source);
+ const arr = [];
+ for (const t of iter){
+ if (t.type == 'any'){
+ const back = arr.length ? arr[arr.length - 1] : null;
+ if (back && back.type == 'any'){
+ back.value += t.value;
+ back.text += t.text;
+ } else {
+ arr.push(t);
+ }
+ } else {
+ arr.push(t);
+ }
+ }
+ return arr.map(t => printToken(t))
+}
+
+StreamLexer.prototype.reset = function(source, info) {
+ console.log('tokens', this.getTokenTypes(source))
+ this.generator = indented(this.lexer, source, info);
+}
+
+StreamLexer.prototype.formatError = function(token) {
+ return this.lexer.formatError(token);
+}
+
+StreamLexer.prototype.has = function(name) {
+ if (name == 'indent') return true;
+ if (name == 'dedent') return true;
+ if (name == 'sof') return true;
+ if (name == 'sol') return true;
+ if (name == 'eof') return true;
+ if (name == 'eol') return true;
+ return this.lexer.has(name);
+}
+
+const lexer = new StreamLexer();
+
+// Errors
+function expectedListNotation(){
+ throw new Error("expected list notation");
+}
+
+function emptyScope(){
+ throw new Error("empty scope");
+}
+
+function expectedRhs(){
+ throw new Error("no value for rhs");
+}
+
+function expectedTerminator(){
+ throw new Error("missing map pair terminator");
+}
+
+function extraSpace(){
+ throw new Error("unused space at end of line");
+}
+
+function genericContextError(){
+ throw new Error("@context error");
+}
+
+function missingComma(){
+ throw new Error("missing comma");
+}
+
+function expectedScopeOperator(){
+ throw new Error("nested scope without scope operator");
+}
+
+function missingRhs(){
+ throw new Error("rhs of pair assignment missing");
+}
+
+function unknownOrEmpty(){
+ throw new Error("unknown or empty");
+}
+
+// Value Reducers
+
+const joinExpressionOperator = ([lhs, s1, op, s2, rhs]) => lhs + s1 + op + s2 + rhs
+const joinSeparatedChunks = ([lhs, op, rhs]) => lhs + op + rhs
+const concat = ([lhs, rhs]) => lhs + rhs
+const lhs = ([lhs, rhs]) => lhs
+const rhs = ([lhs, rhs]) => rhs
+const back = (d) => d[d.length - 1]
+
+function addPairToMap([key, value], map){
+ console.log('add to layer', [key, value], map);
+ if (map.get(key)){
+ throw new Error(`duplicate key ${key}`);
+ }
+ map.set(key, value);
+}
+
+function addPairToDataAndContext([key, data, context], [dataMap, contextMap]){
+ addPairToMap([key, data], dataMap);
+ addPairToMap([key, context], contextMap)
+}
+
+function join(list, rhs){
+ if (!list) return rhs;
+ if (!rhs) return list;
+ if (typeof list == 'string'){
+ return list + rhs;
+ }
+ return list + rhs;
+}
+
+function reduce(list){
+ if (list.length == 1){
+ return list[0];
+ }
+ let memo;
+ for (const item of list){
+ memo = join(memo, item);
+ }
+ return memo;
+}
+
+function optionalTail(list){
+ const [head, tail] = list;
+ if (tail && tail.length){
+ return head.value + reduce(tail);
+ }
+  return head.value;
+}
+
+function map2Object(map){
+ const object = {};
+ for (const pair of map){
+ const [key] = pair;
+ object[key] = map.get(key);
+ }
+ return object;
+}
+
+%}
diff --git a/src/schema.ts b/src/schema.ts
new file mode 100644
index 0000000..3509f42
--- /dev/null
+++ b/src/schema.ts
@@ -0,0 +1,59 @@
+import { check, all, every, okmap, endsWith } from 'typed-json-transform';
+
+interface ValidateState {
+ data?: any
+ path?: string
+}
+
+
+export function validate(description: Moss.Schema.Options, state: ValidateState) {
+
+}
+
+export function parseDescription(description: Moss.Schema.Description): Moss.Schema.Options {
+ if (check(description, String)) {
+ return { type: description as string }
+ }
+ if (check(description, Array)) {
+ const descriptionArray = description as Moss.Schema.Description[];
+ // if (descriptionArray.length == 1) {
+ // return {
+ // type: "array",
+ // items: parseDescription(descriptionArray[0]),
+ // }
+ // }
+ // if (descriptionArray.length == 2 && (descriptionArray[0] == 'map')) {
+ // return {
+ // isMap: true,
+ // singleType: parseDescription(descriptionArray[1]),
+ // }
+ // }
+ return {
+ type: "array",
+ // isArray: true,
+ items: descriptionArray.map(d => parseDescription(d)),
+ }
+ }
+ if (check(description, Object)) {
+ const options: Moss.Schema.Options = description as any;
+ if (!options.type) {
+ return okmap(options, (d: Moss.Schema.Description, key: string) => {
+ return parseDescription(d)
+ });
+ }
+ return options;
+ }
+}
+
+export function validateArray(schema: Moss.Schema.Options, { data, path }: ValidateState) {
+
+}
+
+export function validateObject(current: any, { data, path }: ValidateState) {
+
+}
+
+export function validateScalar(current: any, { data, path }: ValidateState) {
+
+}
+
diff --git a/src/sync.ts b/src/sync.ts
index bff2c44..b3c208a 100644
--- a/src/sync.ts
+++ b/src/sync.ts
@@ -1,15 +1,16 @@
///
import {
- map as map, okmap as okmap, arrayify, extend,
+ map as map, okmap as okmapSync, okmap, arrayify, extend,
check, clone, each, setValueForKeyPath, sum, valueForKeyPath,
- all, isEqual, mergeArray, mergeOrReturnAssignment
+ all, isEqual, mergeArray, mergeOrReturnAssignment, contains
} from 'typed-json-transform';
-import { interpolate as __interpolate } from './interpolate';
+import { interpolate as __interpolate, reservedKeys } from './interpolate';
import { cascade as _cascade, shouldConstruct, select, parseSelectors } from './cascade';
import * as yaml from 'js-yaml';
import { getBranchSync as getBranch } from './resolvers';
+import { parseDescription } from './schema';
import {
newLayer,
@@ -73,14 +74,16 @@ export namespace Sync {
} else {
let val = source[_key];
if (_key[0] === '$') {
- key = (interpolate(current, _key)).data;
- } else if (_key[0] == '\\') {
- key = key.slice(1);
- } else if (_key.indexOf('.') != -1) {
- const [first, ...kp] = _key.split('.')
- key = first;
- val = {};
- setValueForKeyPath(source[_key], kp.join('.'), val);
+ if (!contains(reservedKeys, _key)) {
+ key = (interpolate(current, _key)).data;
+ } else {
+ key = _key;
+ }
+ // } else if (_key.indexOf('.') != -1) {
+ // const [first, ...kp] = _key.split('.')
+ // key = first;
+ // val = {};
+ // setValueForKeyPath(source[_key], kp.join('.'), val);
} else {
key = _key;
}
@@ -94,6 +97,9 @@ export namespace Sync {
}
currentErrorPath(state).path.pop();
}
+ if (current.state.schema) {
+ // check
+ }
return current;
}
@@ -126,8 +132,6 @@ export namespace Sync {
} catch (e) { handleError(e, layer, input) }
}
-
-
export const onMatch = (rv: Moss.ReturnValue, setter: any, operator: Merge.Operator, key: string) => {
let { state, data: lhs } = rv;
currentErrorPath(state).path.push(key);
@@ -214,6 +218,12 @@ export namespace Sync {
$: (current: Moss.ReturnValue, args: any) => {
parseNextStructure(current, args);
},
+ schema: (current: Moss.ReturnValue, args: any) => {
+ const description = continueWithNewFrame(current, args);
+ current.state.schema = parseDescription(description.data);
+ console.log(current.state.schema);
+ current.data = current.state.schema;
+ },
extend: (parent: Moss.ReturnValue, args: any) => {
const layer = continueWithNewFrame(parent, args);
const { data } = layer;
@@ -422,9 +432,13 @@ export namespace Sync {
}
});
+ const jsonSchemaKeywords = ['id', 'schema', 'ref', 'comment'];
+ const mongoKeywords = ['set', 'unset', 'push', 'pull', 'gt', 'lt', 'gte', 'lte', 'exists'];
+ const keywords = okmapSync(jsonSchemaKeywords.concat(mongoKeywords), (key) => ({ key, value: key }));
+
function interpolate(layer: Moss.ReturnValue, input: any) {
const { data, state } = layer;
- const dictionary = { ...state.auto, stack: state.stack }
+ const dictionary = { ...state.auto, ...keywords, stack: state.stack }
const res = _interpolate(layer, input, dictionary);
return { data: res, state: layer.state } as Moss.ReturnValue;
}
@@ -479,18 +493,25 @@ export namespace Sync {
path: sourceMap,
rhs: true
});
- const b = getBranch(uris, resolvers, layer);
- if (!b) {
+ let resolvedBranch;
+ try {
+ resolvedBranch = getBranch(uris, resolvers, layer);
+ } catch (e) {
+ throw ({
+ message: `Can't resolve ${uris}\n ${e.message}`,
+ })
+ }
+ if (!resolvedBranch) {
throw ({
message: `Can't resolve ${uris}\nNone of the available resolvers found a match.\n[${(map(resolvers, (r) => r.name)).filter(e => e).join(', ')}] `,
})
}
- if (b.data) {
+ if (resolvedBranch.data) {
popAll++;
- pushErrorPath(layer.state, { path: ['^' + b.path] })
- const res: Moss.ReturnValue = parseNextStructure(layer, b.data);
+ pushErrorPath(layer.state, { path: ['^' + resolvedBranch.path] })
+ const res: Moss.ReturnValue = parseNextStructure(layer, resolvedBranch.data);
const { data, state: { auto, stack, selectors, merge } } = res;
- b.intermediate = { data, state: { auto, stack, selectors, merge } };
+ resolvedBranch.intermediate = { data, state: { auto, stack, selectors, merge } };
return data;
}
},