
Commit

extra cleaning
kuba-- committed Jul 23, 2018
1 parent 279ca6b commit e97afb3
Showing 2 changed files with 32 additions and 55 deletions.
77 changes: 32 additions & 45 deletions token.go
@@ -22,6 +22,35 @@ import (
"unicode/utf8"
)

// The result of Scan is one of these tokens or a Unicode character.
const (
EOF = -(iota + 1) // reached end of source
Atom // a Prolog atom, possibly quoted
Comment // a comment
Float // a floating point number
Functor // an atom used as a predicate functor
FullStop // "." ending a term
Int // an integer
String // a double-quoted string
Variable // a Prolog variable
Void // the special "_" variable
)

const bufLen = 1024 // at least utf8.UTFMax

var tokenString = map[rune]string{
EOF: "EOF",
Atom: "Atom",
Comment: "Comment",
Float: "Float",
Functor: "Functor",
FullStop: "FullStop",
Int: "Int",
String: "String",
Variable: "Variable",
Void: "Void",
}

// Token encapsulates its type, content and related components.
type Token struct {
Type rune
@@ -30,11 +59,6 @@ type Token struct {
Components []string
}

// String returns formatted string for the token.
func (t *Token) String() string {
return fmt.Sprintf("{type: %s, term: %s, functor: %s, components: %v}", TokenString(t.Type), t.Term, t.Functor, t.Components)
}

// Tokenize scans and classifies prolog terms.
func Tokenize(terms ...string) []*Token {
var tokens []*Token
@@ -113,43 +137,6 @@ func (pos position) String() string {
return s
}

// The result of Scan is one of these tokens or a Unicode character.
const (
EOF = -(iota + 1) // reached end of source
Atom // a Prolog atom, possibly quoted
Comment // a comment
Float // a floating point number
Functor // an atom used as a predicate functor
FullStop // "." ending a term
Int // an integer
String // a double-quoted string
Variable // a Prolog variable
Void // the special "_" variable
)

var tokenString = map[rune]string{
EOF: "EOF",
Atom: "Atom",
Comment: "Comment",
Float: "Float",
Functor: "Functor",
FullStop: "FullStop",
Int: "Int",
String: "String",
Variable: "Variable",
Void: "Void",
}

// TokenString returns a printable string for a token or Unicode character.
func TokenString(tok rune) string {
if s, found := tokenString[tok]; found {
return s
}
return fmt.Sprintf("%q", string(tok))
}

const bufLen = 1024 // at least utf8.UTFMax

// A Scanner implements reading of Unicode characters and tokens from an io.Reader.
type scanner struct {
// Input
@@ -372,7 +359,7 @@ func (s *scanner) scanAlphanumeric(ch rune) rune {
}

func (s *scanner) scanGraphic(ch rune) rune {
for IsGraphic(ch) {
for isGraphic(ch) {
ch = s.next()
}
return ch
@@ -393,7 +380,7 @@ func digitVal(ch rune) int {
func isDecimal(ch rune) bool { return '0' <= ch && ch <= '9' }

// True if the rune is a graphic token char per ISO §6.4.2
func IsGraphic(ch rune) bool {
func isGraphic(ch rune) bool {
return isOneOf(ch, `#$&*+-./:<=>?@^\~`)
}

@@ -646,7 +633,7 @@ func (s *scanner) Scan() rune {
tok = Functor
}
}
case IsGraphic(ch):
case isGraphic(ch):
ch = s.next()
tok = Atom
ch = s.scanGraphic(ch)
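For orientation (not part of the commit), here is a minimal, hypothetical sketch of how the relocated constants, the tokenString map, Tokenize and the Token fields might be used together. The package name ut is an assumption inferred from ut_test.go, the input term is illustrative, and no particular output is claimed:

package ut // assumption: package name inferred from ut_test.go

import "fmt"

// exampleTokenize is a hypothetical usage sketch, not code from this commit.
func exampleTokenize() {
	// Tokenize scans and classifies one or more Prolog terms.
	for _, tok := range Tokenize("likes(mary,wine)") {
		// tokenString maps a token type back to a readable name;
		// Term, Functor and Components are fields of the Token struct above.
		fmt.Printf("%-8s term=%s functor=%s components=%v\n",
			tokenString[tok.Type], tok.Term, tok.Functor, tok.Components)
	}
}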
10 changes: 0 additions & 10 deletions ut_test.go
@@ -82,20 +82,10 @@ func ExampleUnify() {
fmt.Println("X4 = " + mgu["X4"])
fmt.Println("X5 = " + mgu["X5"])

x = "likes(mary,book(title(Title),author(given('Herman'),SurnameTerm)))"
y = "likes(Who,book(title('Moby Dick'),author(given('Herman'),surname('Melville'))))"
mgu = Unify(x, y)
fmt.Println("Title = " + mgu["Title"])
fmt.Println("SurnameTerm = " + mgu["SurnameTerm"])
fmt.Println("Who = " + mgu["Who"])

// Output:
// X1 = g(h(a,b),h(a,b))
// X2 = h(a,b)
// X3 = h(a,b)
// X4 = b
// X5 = b
// Title = 'Moby Dick'
// SurnameTerm = surname('Melville')
// Who = mary
}
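
For context beyond the trimmed example above: Unify takes two terms as strings and returns the most general unifier as a map from variable names to the terms they are bound to. A minimal, hypothetical sketch (same package and fmt import assumed as in the sketch above; the terms and the bindings noted in the comments are illustrative, assuming unification succeeds):

// sketchUnify is a hypothetical usage sketch, not code from this commit.
func sketchUnify() {
	mgu := Unify("p(X,b)", "p(a,Y)")
	fmt.Println("X = " + mgu["X"]) // expected binding: a
	fmt.Println("Y = " + mgu["Y"]) // expected binding: b
}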
