Skip to content

Commit e36ed79

Browse files
chore: add TypeScript definition file for most API
This is following advice found in this comment: microsoft/TypeScript#8305 (comment)
1 parent bf80c83 commit e36ed79

File tree

6 files changed

+853
-0
lines changed

6 files changed

+853
-0
lines changed

lib/coffee-script/coffee-script.d.ts

+59
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,59 @@
1+
import SourceMap, { V3SourceMap } from './sourcemap';
2+
import { Token, LexerOptions } from './lexer';
3+
import { Block } from './nodes';
4+
5+
/** The CoffeeScript compiler's version string. */
export const VERSION: string;

/** File extensions recognized as CoffeeScript sources — presumably '.coffee' et al.; confirm against the implementation. */
export const FILE_EXTENSIONS: Array<string>;
7+
8+
/** Options accepted by `compile`. */
interface CompileOptions {
  /** Emit the "Generated by CoffeeScript" banner comment in the output. */
  header?: boolean;
  /** NOTE(review): presumably shifts reported line numbers (e.g. for embedded code) — confirm against the compiler. */
  shiftLine?: boolean;

  // Source map options.
  /** Generate a source map alongside the compiled JavaScript (changes the shape of `CompileResult`). */
  sourceMap?: boolean;
  /** Name of the generated (output) file, as recorded in the source map. */
  generatedFile?: string;
  /** Root path prepended to source file names in the source map. */
  sourceRoot?: string;
  /** Source file names recorded in the source map. */
  sourceFiles?: Array<string>;
  /** NOTE(review): presumably embeds the source map inline in the output — confirm. */
  inline?: boolean;
}
19+
20+
/**
 * Result of `compile`: a plain JavaScript string by default, or — when
 * `CompileOptions.sourceMap` is set — an object holding the JavaScript
 * together with both source-map representations.
 */
type CompileResult = string | {
  js: string,
  sourceMap: SourceMap,
  v3SourceMap: V3SourceMap,
};
25+
26+
/** Options accepted by `run`; extends the compile options with the script's filename. */
interface RunOptions extends CompileOptions {
  /** Filename used for the executed script (e.g. for `__filename` and error reporting). */
  filename?: string;
}
29+
30+
/**
 * Compile CoffeeScript code to JavaScript, using the Coffee/Jison compiler.
 *
 * If `options.sourceMap` is specified, then `options.filename` must also be specified. All
 * options that can be passed to `SourceMap#generate` may also be passed here.
 *
 * This returns a javascript string, unless `options.sourceMap` is passed,
 * in which case this returns a `{js, v3SourceMap, sourceMap}`
 * object, where sourceMap is a sourcemap.coffee#SourceMap object, handy for doing programmatic
 * lookups.
 *
 * @param code CoffeeScript source text.
 * @param options Compilation options; see `CompileOptions`.
 * @returns See `CompileResult` — a string, or an object when `sourceMap` is set.
 */
export function compile(code: string, options?: CompileOptions): CompileResult;
42+
43+
/**
 * Tokenize a string of CoffeeScript code, and return the array of tokens.
 *
 * @param code CoffeeScript source text.
 * @param options Lexer options forwarded to the tokenizer.
 */
export function tokens(code: string, options?: LexerOptions): Array<Token>;
47+
48+
/**
 * Parse a string of CoffeeScript code or an array of lexed tokens, and
 * return the AST. You can then compile it by calling `.compile()` on the root,
 * or traverse it by using `.traverseChildren()` with a callback.
 *
 * @param source Raw CoffeeScript source, or tokens previously produced by `tokens`.
 * @param options Lexer options used when `source` is a string.
 * @returns The root `Block` node of the AST.
 */
export function nodes(source: string | Array<Token>, options?: LexerOptions): Block;
54+
55+
/**
56+
* Compile and execute a string of CoffeeScript (on the server), correctly
57+
* setting `__filename`, `__dirname`, and relative `require()`.
58+
*/
59+
export function run(code: string, options?: RunOptions);

lib/coffee-script/lexer.d.ts

+83
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,83 @@
1+
import { LocationData } from './nodes';
2+
3+
export type TokenType =
4+
'BIN?' |
5+
'BOOL' |
6+
'CALL_END' |
7+
'CALL_START' |
8+
'COMPARE' |
9+
'COMPOUND_ASSIGN' |
10+
'COMPOUND_ASSIGN' |
11+
'FOR' |
12+
'FORINSTANCEOF' |
13+
'FUNC_EXIST' |
14+
'HERESTRING' |
15+
'IDENTIFIER' |
16+
'IF' |
17+
'INDENT' |
18+
'INDEX_END' |
19+
'INDEX_SOAK' |
20+
'INDEX_START' |
21+
'INSTANCEOF' |
22+
'LEADING_WHEN' |
23+
'MATH' |
24+
'NEOSTRING' |
25+
'OUTDENT' |
26+
'OUTDENT' |
27+
'OWN' |
28+
'PARAM_END' |
29+
'PARAM_START' |
30+
'REGEX' |
31+
'REGEX_END' |
32+
'REGEX_START' |
33+
'RELATION' |
34+
'SHIFT' |
35+
'STATEMENT' |
36+
'STRING' |
37+
'STRING_END' |
38+
'STRING_START' |
39+
'TERMINATOR' |
40+
'TOKENS' |
41+
'UNARY' |
42+
'UNARY_MATH' |
43+
'UNLESS' |
44+
'WHEN' |
45+
'YIELD';
46+
47+
/**
 * A single lexed token: a `[tag, source text, location]` triple as produced
 * by `Lexer#tokenize`.
 */
export type Token = [
  TokenType,
  // code
  string,
  LocationData
];
53+
54+
/** Options accepted by `Lexer#tokenize` (and forwarded by `tokens`/`nodes`). */
export interface LexerOptions {
  /** Treat the source as Literate CoffeeScript (Markdown with indented code blocks). */
  literate?: boolean;
  /** NOTE(review): presumably the starting line offset for reported token locations — confirm. */
  line?: number;
  /** NOTE(review): presumably the starting column offset for reported token locations — confirm. */
  column?: number;
  /** NOTE(review): presumably stops lexing once brackets re-balance (used for interpolations) — confirm. */
  untilBalanced?: boolean;
  /** When false, skips the Rewriter pass over the token stream. */
  rewrite?: boolean;
}
61+
62+
/**
 * The CoffeeScript lexer. Use `tokenize` to turn source text into the token
 * stream consumed by the parser.
 */
export class Lexer {
  /**
   * **tokenize** is the Lexer's main method. Scan by attempting to match tokens
   * one at a time, using a regular expression anchored at the start of the
   * remaining code, or a custom recursive token-matching method
   * (for interpolations). When the next token has been recorded, we move forward
   * within the code past the token, and begin again.
   *
   * Each tokenizing method is responsible for returning the number of characters
   * it has consumed.
   *
   * Before returning the token stream, run it through the [Rewriter](rewriter.html).
   */
  tokenize(code: string, opts?: LexerOptions): Array<Token>;

  /**
   * Preprocess the code to remove leading and trailing whitespace, carriage
   * returns, etc. If we're lexing literate CoffeeScript, strip external Markdown
   * by removing all lines that aren't indented by at least four spaces or a tab.
   */
  clean(code: string): string;
}

0 commit comments

Comments (0)