Replaced all occurrences of github.com with git.tebibyte.media

This commit is contained in:
Sasha Koshka 2022-08-12 10:21:36 -05:00
parent 09170e390d
commit 81b47f7734
9 changed files with 12 additions and 12 deletions

View File

@ -2,7 +2,7 @@ package arfc
import "os" import "os"
import "fmt" import "fmt"
import "github.com/sashakoshka/arf" import "git.tebibyte.media/sashakoshka/arf"
func main () { func main () {
if len(os.Args) != 2 { if len(os.Args) != 2 {

2
go.mod
View File

@ -1,3 +1,3 @@
module github.com/sashakoshka/arf module git.tebibyte.media/sashakoshka/arf
go 1.18 go 1.18

View File

@ -1,8 +1,8 @@
package lexer package lexer
import "io" import "io"
import "github.com/sashakoshka/arf/file" import "git.tebibyte.media/sashakoshka/arf/file"
import "github.com/sashakoshka/arf/types" import "git.tebibyte.media/sashakoshka/arf/types"
// LexingOperation holds information about an ongoing lexing operataion. // LexingOperation holds information about an ongoing lexing operataion.
type LexingOperation struct { type LexingOperation struct {

View File

@ -1,8 +1,8 @@
package lexer package lexer
import "testing" import "testing"
import "github.com/sashakoshka/arf/file" import "git.tebibyte.media/sashakoshka/arf/file"
import "github.com/sashakoshka/arf/types" import "git.tebibyte.media/sashakoshka/arf/types"
func checkTokenSlice (filePath string, correct []Token, test *testing.T) { func checkTokenSlice (filePath string, correct []Token, test *testing.T) {
file, err := file.Open(filePath) file, err := file.Open(filePath)

View File

@ -1,6 +1,6 @@
package lexer package lexer
import "github.com/sashakoshka/arf/file" import "git.tebibyte.media/sashakoshka/arf/file"
// tokenizeSymbolBeginning lexes a token that starts with a number. // tokenizeSymbolBeginning lexes a token that starts with a number.
func (lexer *LexingOperation) tokenizeNumberBeginning (negative bool) (err error) { func (lexer *LexingOperation) tokenizeNumberBeginning (negative bool) (err error) {

View File

@ -1,7 +1,7 @@
package lexer package lexer
import "strconv" import "strconv"
import "github.com/sashakoshka/arf/file" import "git.tebibyte.media/sashakoshka/arf/file"
// tokenizeString tokenizes a string or rune literal. // tokenizeString tokenizes a string or rune literal.
func (lexer *LexingOperation) tokenizeString (isRuneLiteral bool) (err error) { func (lexer *LexingOperation) tokenizeString (isRuneLiteral bool) (err error) {

View File

@ -1,7 +1,7 @@
package lexer package lexer
import "fmt" import "fmt"
import "github.com/sashakoshka/arf/file" import "git.tebibyte.media/sashakoshka/arf/file"
// TokenKind is an enum represzenting what role a token has. // TokenKind is an enum represzenting what role a token has.
type TokenKind int type TokenKind int

View File

@ -1,7 +1,7 @@
package arf package arf
import "io" import "io"
import "github.com/sashakoshka/arf/parser" import "git.tebibyte.media/sashakoshka/arf/parser"
func CompileModule (modulePath string, output io.Writer) (err error) { func CompileModule (modulePath string, output io.Writer) (err error) {
_, err = parser.Parse(modulePath) _, err = parser.Parse(modulePath)

View File

@ -2,8 +2,8 @@ package parser
import "os" import "os"
import "path/filepath" import "path/filepath"
import "github.com/sashakoshka/arf/file" import "git.tebibyte.media/sashakoshka/arf/file"
import "github.com/sashakoshka/arf/lexer" import "git.tebibyte.media/sashakoshka/arf/lexer"
// ParsingOperation holds information about an ongoing parsing operation. // ParsingOperation holds information about an ongoing parsing operation.
type ParsingOperation struct { type ParsingOperation struct {