Merge pull request 'document-packages' (#26) from document-packages into main

Reviewed-on: sashakoshka/fspl#26
This commit is contained in:
Sasha Koshka 2024-02-09 08:39:46 +00:00
commit 5e3ef3f57c
18 changed files with 132 additions and 56 deletions

5
analyzer/doc.go Normal file
View File

@ -0,0 +1,5 @@
// Package analyzer implements the semantic analysis stage of the FSPL compiler.
// The analyzer takes in a well-formed abstract syntax tree, ensures its
// semantic correctness, and fills in the semantic information stored within
// the tree.
package analyzer

21
cmd/fsplc/doc.go Normal file
View File

@ -0,0 +1,21 @@
// FSPLC compiles FSPL programs.
//
// Its job is to take in FSPL code, and compile it to one of the supported
// output formats. Currently it supports native object files, native ASM, and
// LLVM IR code.
//
// Usage:
// fsplc [OPTION]... [FILE]...
//
// -O=0
// Optimization level, 0-3. This does not have an effect when outputting
// LLVM IR code.
// -o=FILE.o
// The file to output to. The file extension determines what output format
// to use:
// - .o: Native object file
// - .s: Native assembly
// - .ll: LLVM IR code
// If unspecified, the name (without the extension) of the first input file
// is used, and a .o extension is added to produce an object file.
package main

4
entity/doc.go Normal file
View File

@ -0,0 +1,4 @@
// Package entity provides data representations of language concepts.
// They are used to construct syntax trees and semantic trees. It additionally
// provides some utility functions to compare certain entities.
package entity

View File

@ -8,10 +8,20 @@ type TopLevel interface {
topLevel ()
}
// Access determines the external access control mode for a top-level entity.
type Access int; const (
	// AccessPrivate disallows other modules from accessing a top-level
	// entity. This is the default access mode.
	AccessPrivate Access = iota
	// AccessRestricted causes a top-level entity to appear opaque to other
	// modules. Values of restricted types can be passed around, assigned
	// to each other, and their methods can be called, but the implementation
	// of the type is entirely hidden. This access mode cannot be applied to
	// functions.
	AccessRestricted
	// AccessPublic allows other modules to access an entity normally.
	AccessPublic
)

5
errors/doc.go Normal file
View File

@ -0,0 +1,5 @@
// Package errors provides a location tracking and error formatting system.
// It provides a way to keep track of the in-file position of the physical text
// that in-memory data structures were derived from, and a way to use this
// positional information to create informative and easy to read errors.
package errors

View File

@ -37,6 +37,18 @@ func Errorf (position Position, format string, variables ...any) Error {
// Format returns a formatted string representing an error. This string may take
// up multiple lines and contain ANSI escape codes. If err does not fulfill the
// Error interface, err.Error() is returned instead.
//
// When the error fulfills the Error interface, the formatted output will
// be of the general form:
// FILE:ROW+1:START+1: MESSAGE
// ROW+1 | LINE
// ^^^^
// Where the position of the underline (^^^^) corresponds to the start and end
// fields in the error's position. Note that the row and column numbers as
// displayed are increased by one from their normal zero-indexed state, because
// most editors display row and column numbers starting at 1:1. Additionally,
// because normal error messages do not produce trailing line breaks, neither
// does this function.
func Format (err error) string {
if err, ok := err.(Error); ok {
return fmt.Sprintf (

View File

@ -7,14 +7,19 @@ import "strings"
// positions in the same file should be created using the same File string, and
// positions on the same line should be created using the same Line string.
type Position struct {
File string
Line string
File string // The name of the file
Line string // the text of the line the position is on
Row int
Start int
End int
Row int // Line number, starting at 0
Start int // Starting column number, starting at 0
End int // Terminating column number, starting at 0
}
// String returns a string representation of the position of the form:
//	FILE:ROW+1:START+1
// The row and column numbers are displayed one greater than their zero-indexed
// values, because most editors display row and column numbers starting at 1:1.
func (pos Position) String () string {
	row    := pos.Row   + 1
	column := pos.Start + 1
	return fmt.Sprintf("%s:%d:%d", pos.File, row, column)
}

4
generator/doc.go Normal file
View File

@ -0,0 +1,4 @@
// Package generator implements the code generation stage of the FSPL compiler.
// It converts a well-formed semantic tree into LLVM IR code, and outputs it to
// an io.Writer.
package generator

View File

@ -1,3 +1,5 @@
// Package native provides a generator target describing the current system.
// This is accomplished using several conditionally compiled source files.
package native
import "git.tebibyte.media/sashakoshka/fspl/generator"

View File

@ -1,17 +1,26 @@
// Package integer provides utilities for working with integer data.
package integer
// UnsignedMin returns the minimum value that an unsigned integer of the
// specified width can represent. This is always zero, regardless of width.
func UnsignedMin (width int) uint64 {
	return 0
}
// UnsignedMax returns the maximum value that an unsigned integer of the
// specified width can represent. The shift is performed in uint64, so a
// width of 64 wraps around to yield the full 64-bit maximum.
func UnsignedMax (width int) uint64 {
	return (1 << width) - 1
}
func SignedMin(width int) int64 {
// SignedMin returns the minimum value that a signed integer of the specified
// width can represent.
func SignedMin (width int) int64 {
return -1 - int64(UnsignedMax(width) / 2)
}
func SignedMax(width int) int64 {
// SignedMax returns the maximum value that a signed integer of the specified
// width can represent.
func SignedMax (width int) int64 {
return int64(UnsignedMax(width) / 2)
}

6
lexer/doc.go Normal file
View File

@ -0,0 +1,6 @@
// Package lexer implements the lexical analysis stage of the FSPL compiler.
// Its job is to convert text into a series of tokens (lexemes) which are then
// passed to other parts of the compiler to be interpreted into more complex
// data structures. The lexer is able to read in new tokens as needed instead of
// reading them in all at once.
package lexer

View File

@ -6,6 +6,7 @@ import "bufio"
import "unicode"
import "git.tebibyte.media/sashakoshka/fspl/errors"
// TokenKind is an enumeration of all tokens the FSPL compiler recognizes.
type TokenKind int; const (
EOF TokenKind = -(iota + 1)
@ -30,6 +31,8 @@ type TokenKind int; const (
Star // \*
)
// String returns a string representation of the token kind. The result for any
// kind corresponds directly to the name of the constant which defines it.
func (kind TokenKind) String () string {
switch kind {
case EOF: return "EOF"
@ -54,35 +57,17 @@ func (kind TokenKind) String () string {
}
}
// Symbols returns a mapping of token kind names to their corresponding
// TokenKind values. Each name corresponds directly to the constant which
// defines that kind.
func Symbols () map[string] TokenKind {
	symbols := make(map[string] TokenKind)
	symbols["EOF"]         = EOF
	symbols["Ident"]       = Ident
	symbols["TypeIdent"]   = TypeIdent
	symbols["Int"]         = Int
	symbols["Float"]       = Float
	symbols["String"]      = String
	symbols["Symbol"]      = Symbol
	symbols["LParen"]      = LParen
	symbols["LBrace"]      = LBrace
	symbols["LBracket"]    = LBracket
	symbols["RParen"]      = RParen
	symbols["RBrace"]      = RBrace
	symbols["RBracket"]    = RBracket
	symbols["Colon"]       = Colon
	symbols["DoubleColon"] = DoubleColon
	symbols["Dot"]         = Dot
	symbols["DoubleDot"]   = DoubleDot
	symbols["Star"]        = Star
	return symbols
}
// Token represents a single lexeme of an FSPL file.
type Token struct {
Kind TokenKind
Value string
Position errors.Position
Position errors.Position // The position of the token in its file
Kind TokenKind // Which kind of token it is
Value string // The token's value
}
// String returns a string representation of the token, which is of the form:
// KIND 'VALUE'
// or if the value is empty:
// KIND
func (tok Token) String () string {
output := tok.Kind.String()
if tok.Value != "" {
@ -91,10 +76,12 @@ func (tok Token) String () string {
return output
}
// EOF reports whether this token is an EOF token, that is, whether its kind
// is EOF.
func (tok Token) EOF () bool {
	if tok.Kind == EOF {
		return true
	}
	return false
}
// Is returns whether or not the token kind matches any of the given kinds.
func (tok Token) Is (kinds ...TokenKind) bool {
for _, kind := range kinds {
if tok.Kind == kind { return true }
@ -102,6 +89,7 @@ func (tok Token) Is (kinds ...TokenKind) bool {
return false
}
// ValueIs returns whether or not the token value matches any of the given values.
func (tok Token) ValueIs (values ...string) bool {
for _, value := range values {
if tok.Value == value { return true }
@ -109,11 +97,18 @@ func (tok Token) ValueIs (values ...string) bool {
return false
}
// Lexer is an object capable of producing tokens. A default implementation
// that reads from an io.Reader can be obtained via NewLexer.
type Lexer interface {
	// Next returns the next token. If there are no more tokens, it returns
	// an EOF token. It only returns an error on EOF if the file terminated
	// unexpectedly.
	Next () (Token, error)
}
// NewLexer creates a new default lexer that reads from the given reader. The
// filename parameter is used for token locations and error messages.
func NewLexer (filename string, reader io.Reader) (Lexer, error) {
// TODO: replace this function with LexReader and LexFile functions
lexer := &fsplLexer {
filename: filename,
lineScanner: bufio.NewScanner(reader),

View File

@ -1,20 +0,0 @@
# llvm
This package was created for the express purpose of generating LLVM IR. A good
portion of this code was taken from the [llir project](https://github.com/llir/llvm).
This package supports:
- Instructions and terminators
- Constants
- Modules
- Functions
- Types
- Type defs
- Blocks
- Opaque pointers
This package does not support:
- Constant expressions
- Sanity checking
- Typed pointers
- Metadata and attributes

8
llvm/doc.go Normal file
View File

@ -0,0 +1,8 @@
// Package llvm provides a way to build and write out LLVM modules.
//
// This package was created for the express purpose of generating LLVM IR. A
// good portion of this code was taken from https://github.com/llir/llvm. As
// such, it does not contain features such as sanity checking and IR parsing.
// The main reason this package was created was to add support for opaque
// pointers and drop support for the legacy typed ones.
package llvm

6
parser/doc.go Normal file
View File

@ -0,0 +1,6 @@
// Package parser implements the parsing stage of the FSPL compiler.
// The parser takes in a stream of tokens from the lexer and converts them into
// an abstract syntax tree (AST), which is assembled out of structures from the
// entity package. The parser only fills out fields on the structures that
// pertain to position and syntax.
package parser

View File

@ -45,6 +45,7 @@ type Parser struct {
// NewParser creates a new parser that parses the given file.
func NewParser (name string, file io.Reader) (*Parser, error) {
// TODO: have the parser take in a lexer instead of creating one.
lx, err := lexer.NewLexer(name, file)
if err != nil { return nil, err }

View File

@ -11,6 +11,8 @@ type Tree struct {
Declarations []entity.TopLevel
}
// String returns a string representation of the tree. The result of this will
// be syntactically valid.
func (this *Tree) String () string {
out := ""
for index, declaration := range this.Declarations {

View File

@ -1,3 +1,4 @@
// Package testcommon provides re-usable unit testing functionality.
package testcommon
import "testing"