Compare commits

...

3 Commits

3 changed files with 102 additions and 4 deletions

View File

@ -63,6 +63,7 @@ func (err Error) Error () (formattedMessage string) {
columnCountdown --
}
for err.width > 1 {
// TODO: for tabs, print out 8 of these instead.
formattedMessage += "-"
}
formattedMessage += "-\n"

View File

@ -36,9 +36,9 @@ func (lexer *LexingOperation) tokenize () (err error) {
number := lexer.char >= '0' && lexer.char <= '9'
if number {
// TODO: tokenize number
// TODO: tokenize number begin
} else if lowercase || uppercase {
// TODO: tokenize multi
// TODO: tokenize alpha begin
} else {
err = lexer.tokenizeSymbolBeginning()
if err != nil { return err }
@ -52,13 +52,37 @@ func (lexer *LexingOperation) tokenize () (err error) {
func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
switch lexer.char {
case '#':
// comment
for lexer.char != '\n' {
lexer.nextRune()
}
case '\t':
// indent level
previousToken := lexer.tokens[len(lexer.tokens) - 1]
if !previousToken.Is(TokenKindNewline) ||
!previousToken.Is(TokenKindNewline) {
file.NewError (
lexer.file.Location(), 1,
"tab not used as indent",
file.ErrorKindWarn)
break
}
for lexer.char == '\t' {
lexer.addToken (Token {
kind: TokenKindIndent,
})
lexer.nextRune()
}
case '\n':
// line break
lexer.addToken (Token {
kind: TokenKindNewline,
})
lexer.nextRune()
case '"':
// TODO: tokenize string literal
lexer.nextRune()
@ -95,8 +119,56 @@ func (lexer *LexingOperation) tokenizeSymbolBeginning () (err error) {
kind: TokenKindRBrace,
})
lexer.nextRune()
// TODO: add more for things like math symbols, return
// direction operators, indentation, etc
case '+':
lexer.addToken (Token {
kind: TokenKindPlus,
})
lexer.nextRune()
case '-':
// TODO: tokenize dash begin
lexer.nextRune()
case '*':
lexer.addToken (Token {
kind: TokenKindAsterisk,
})
lexer.nextRune()
case '/':
lexer.addToken (Token {
kind: TokenKindSlash,
})
lexer.nextRune()
case '@':
lexer.addToken (Token {
kind: TokenKindAt,
})
lexer.nextRune()
case '!':
lexer.addToken (Token {
kind: TokenKindExclamation,
})
lexer.nextRune()
case '%':
lexer.addToken (Token {
kind: TokenKindPercent,
})
lexer.nextRune()
case '~':
lexer.addToken (Token {
kind: TokenKindTilde,
})
lexer.nextRune()
case '<':
// TODO: tokenize less than begin
lexer.nextRune()
case '>':
// TODO: tokenize greater than begin
lexer.nextRune()
case '|':
// TODO: tokenize bar begin
lexer.nextRune()
case '&':
// TODO: tokenize and begin
lexer.nextRune()
default:
err = file.NewError (
lexer.file.Location(), 1,

View File

@ -11,6 +11,7 @@ const (
TokenKindSeparator
TokenKindPermission
TokenKindReturnDirection
TokenKindInt
TokenKindFloat
@ -27,6 +28,25 @@ const (
TokenKindRBracket
TokenKindLBrace
TokenKindRBrace
TokenKindPlus
TokenKindMinus
TokenKindAsterisk
TokenKindSlash
TokenKindAt
TokenKindExclamation
TokenKindPercent
TokenKindTilde
TokenKindLessThan
TokenKindLShift
TokenKindGreaterThan
TokenKindRShift
TokenKindBinaryOr
TokenKindLogicalOr
TokenKindBinaryAnd
TokenKindLogicalAnd
)
// Token represents a single token. It holds its location in the file, as well
@ -42,6 +62,11 @@ func (token Token) Kind () (kind TokenKind) {
return token.kind
}
// Is reports whether the token's kind matches the given kind.
func (token Token) Is (kind TokenKind) (match bool) {
	match = token.kind == kind
	return
}
// Value returns the value of the token. Depending on what kind of token it is,
// this value may be nil.
func (token Token) Value () (value any) {