Skip to content

Commit

Permalink
Count our line-offsets only AFTER we've updated IFs.
Browse files Browse the repository at this point in the history
Nowadays we rewrite some IF statements, turning this:

        IF A < B THEN 300 ELSE 400

into this:

        IF A < B THEN GOTO 300 ELSE GOTO 400

By doing this we change the length of our program, and that means
that any calculated line-number -> program-offset mapping is
off.  Calculate the line-number indexes _after_ expansion, to
ensure they're always correct.
  • Loading branch information
skx committed Jan 26, 2019
1 parent 40e0697 commit 41adb1f
Showing 1 changed file with 26 additions and 26 deletions.
52 changes: 26 additions & 26 deletions eval/eval.go
Original file line number Diff line number Diff line change
Expand Up @@ -145,41 +145,19 @@ func New(stream *tokenizer.Tokenizer) (*Interpreter, error) {
// Save the tokens that our program consists of, one by one,
// until we hit the end.
//
// We also record the offset at which each line starts, which
// means that the GOTO & GOSUB statements don't need to scan
// the program from start to finish to find the destination
// to jump to.
// We also insert any implied GOTO statements into IF
// statements which lack them.
//
offset := 0
for {

//
// Process each token from our tokenizer.
// Fetch the next token from our tokenizer.
//
tok := stream.NextToken()
if tok.Type == token.EOF {
break
}

//
// Did we find a line-number?
//
if tok.Type == token.LINENO {

// Save the offset in the map
line := tok.Literal

// Already an offset? That means we
// have duplicate line-numbers
if t.lines[line] != 0 {
fmt.Printf("WARN: Line %s is duplicated - GOTO/GOSUB behaviour is undefined\n", line)
}
t.lines[line] = offset

// TODO: Warn about line-numbers not being
// sequential. Or at least going-backwards.
}

//
// If the previous token was a "THEN" or "ELSE", and the
// current token is an integer then we add in the implicit
Expand All @@ -198,12 +176,13 @@ func New(stream *tokenizer.Tokenizer) (*Interpreter, error) {
}
}

//
// Append the token to our array
//
t.program = append(t.program, tok)

// Continue - recording the previous token too.
prevToken = tok
offset++
}

//
Expand Down Expand Up @@ -238,6 +217,27 @@ func New(stream *tokenizer.Tokenizer) (*Interpreter, error) {
//
for offset, tok := range t.program {

//
// Did we find a line-number?
//
if tok.Type == token.LINENO {

//
// Get the line-number.
//
line := tok.Literal

//
// Do we already have an offset saved?
//
// If so that means we have duplicate line-numbers
//
if t.lines[line] != 0 {
fmt.Printf("WARN: Line %s is duplicated - GOTO/GOSUB behaviour is undefined\n", line)
}
t.lines[line] = offset
}

//
// If we're in a comment then skip all action until
// we hit the next newline (or EOF).
Expand Down

0 comments on commit 41adb1f

Please sign in to comment.