#ifndef lexer_h
#define lexer_h
#include "mio.h"
#include "buffer.h"
#include "token.h"
////////////////////////////////////////////////////////////////////////////
//
// Lexer is a lexical scanner - translates a text file into individual
// tokens. The Token class is used to represent an individual token, the
// Lexer class is used to transform a text file into a list of tokens.
//
// The scanning algorithm is quite involved. We may read ahead in the file,
// then back up. An 'undo' buffer is maintained. The actual file is
// referred to indirectly via the Mio class.
//
// The current line number and filename are also recorded by the Lexer
// object. An error message function is provided - given an error message,
// the filename and line number are printed to standard error, followed by
// the error message.
//
////////////////////////////////////////////////////////////////////////////
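////////////////////////////////////////////////////////////////////////////
//
// A minimal usage sketch (illustrative only - the recipe filename and the
// Token member names used below, such as type() and an end-of-file token
// value, are assumptions and do not necessarily match token.h; Open() is
// assumed to return non-zero on success):
//
//     Lexer lex;
//     if (!lex.Open("recipe.mk"))       // open the recipe file
//         return 1;                     // could not open it
//     Token t;
//     for (;;) {
//         lex.token(t);                 // scan the next token
//         if (t.type() == Token::eof)   // assumed end-of-file token type
//             break;
//         // ... hand the token to the caller/parser ...
//     }
//     lex.errmsg("unexpected token");   // filename and line number go to
//                                       // standard error, then the message
//
////////////////////////////////////////////////////////////////////////////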
class Lexer {
    Mio file;
    int linenum;
    Buffer filename;
    Token::tokentype lasttokentype;

    // curchar() returns the next character in the file.
    // Calling curchar() does NOT actually "read" (consume) the character;
    // it is a "peek" function.
    int curchar() { return (file.peek()); }

    // nextchar() actually reads (consumes) the next character and keeps
    // the current line number up to date.
    int nextchar() {
        int c = file.get();
        if (c == '\n') ++linenum;
        return (c);
    }
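
    // Illustrative scanning step, showing how curchar() and nextchar()
    // typically work together (a sketch only - the real logic lives in
    // token2() in the implementation file, which is not shown here):
    //
    //     while (isspace(curchar()))   // peek: nothing consumed yet
    //         nextchar();              // consume it, bumping linenum on '\n'
    //     int c = nextchar();          // first significant character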

    void error(const char *);
public:
    Lexer() {}
    ~Lexer() {}
    int Open(const char *);             // Open file to read
    void token(Token &);                // Scan the next token into the argument
private:
    void token2(Token &);
public:
    void errmsg(const char *);          // Show error message
    void errmsg(unsigned long, const char *);
                                        // Show error message for a different line
    int Linenum() { return (linenum); }
                                        // Return current line number in recipe file
};
#endif