
Commit

Cleanup directory structure
pgundlach committed Mar 4, 2022
1 parent 12fad77 commit a57a53c
Showing 10 changed files with 372 additions and 115 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -0,0 +1,2 @@
playground
.vscode
6 changes: 6 additions & 0 deletions Readme.md
@@ -0,0 +1,6 @@
[![Go reference documentation](https://img.shields.io/badge/doc-go%20reference-73FA79)](https://pkg.go.dev/github.com/speedata/goxpath)

XPath 2.0 package for Go.


License: BSD-3-Clause License
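
The new Readme only states that goxpath is an XPath 2.0 package for Go and links to the pkg.go.dev reference. As a rough sketch of how a consumer project would pull the module in, the skeleton below uses the import path from go.mod in this commit; no exported API is visible in this diff, so nothing beyond the import is assumed.

```go
// Hypothetical consumer skeleton. Only the module/import path
// (github.com/speedata/goxpath) is taken from this commit; the package's
// exported API is not shown in the diff, so no calls are made here.
package main

import (
	_ "github.com/speedata/goxpath" // blank import: resolves the dependency only
)

func main() {
	// See https://pkg.go.dev/github.com/speedata/goxpath for the actual API.
}
```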
2 changes: 1 addition & 1 deletion xpath/debug.go → debug.go
@@ -1,4 +1,4 @@
package xpath
package goxpath

import (
"fmt"
2 changes: 1 addition & 1 deletion xpath/function.go → function.go
@@ -1,4 +1,4 @@
package xpath
package goxpath

import (
"fmt"
2 changes: 1 addition & 1 deletion go.mod
@@ -1,5 +1,5 @@
module github.com/speedata/goxpath

require github.com/speedata/goxml v0.0.0-20211102133526-01b44237f426
require github.com/speedata/goxml v0.0.0-20220304094847-e25a4c712d89

go 1.17
6 changes: 4 additions & 2 deletions go.sum
@@ -1,2 +1,4 @@
github.com/speedata/goxml v0.0.0-20211102133526-01b44237f426 h1:MLCL5DKlu9Ag6qT8h489DDfScb9dlW+M+6WfwrAWMo8=
github.com/speedata/goxml v0.0.0-20211102133526-01b44237f426/go.mod h1:VkzYUyr9eWtgHFw9LzlZqgv7U+d+9+ldgNy7+BNMNdo=
github.com/speedata/goxml v0.0.0-20220301131957-3fc2d5740a7b h1:SkKN5KFbtRyE6CYvKul4m/ipG0pXYGqvq7sfPS5ZZAU=
github.com/speedata/goxml v0.0.0-20220301131957-3fc2d5740a7b/go.mod h1:VkzYUyr9eWtgHFw9LzlZqgv7U+d+9+ldgNy7+BNMNdo=
github.com/speedata/goxml v0.0.0-20220304094847-e25a4c712d89 h1:zxChaL4RTqrK9JwTlfFW3UOTDBfz2yEg3s4vUYOFo78=
github.com/speedata/goxml v0.0.0-20220304094847-e25a4c712d89/go.mod h1:VkzYUyr9eWtgHFw9LzlZqgv7U+d+9+ldgNy7+BNMNdo=
126 changes: 63 additions & 63 deletions xpath/tokenizer.go → tokenizer.go
@@ -1,4 +1,4 @@
package xpath
package goxpath

import (
"fmt"
@@ -10,56 +10,56 @@ import (
type tokenType int

const (
// TokAny contains any token.
TokAny tokenType = iota
// TokString contains any characters including whitespace.
TokString
// TokVarname represents a variable name.
TokVarname
// TokNumber represents a float64.
TokNumber
// TokOperator contains a single or double letter operator or path separator.
TokOperator
// TokOpenParen is an opening parenthesis (.
TokOpenParen
// TokCloseParen is a closing parenthesis ).
TokCloseParen
// TokOpenBracket is an opening bracket [.
TokOpenBracket
// TokCloseBracket is a closing bracket ].
TokCloseBracket
// TokQName is a QName (which might contain one colon).
TokQName
// TokComma represents a comma
TokComma
// TokDoubleColon represents a word with two colons at the end (axis for
// tokAny contains any token.
tokAny tokenType = iota
// tokString contains any characters including whitespace.
tokString
// tokVarname represents a variable name.
tokVarname
// tokNumber represents a float64.
tokNumber
// tokOperator contains a single or double letter operator or path separator.
tokOperator
// tokOpenParen is an opening parenthesis (.
tokOpenParen
// tokCloseParen is a closing parenthesis ).
tokCloseParen
// tokOpenBracket is an opening bracket [.
tokOpenBracket
// tokCloseBracket is a closing bracket ].
tokCloseBracket
// tokQName is a QName (which might contain one colon).
tokQName
// tokComma represents a comma
tokComma
// tokDoubleColon represents a word with two colons at the end (axis for
// example)
TokDoubleColon
tokDoubleColon
)

func (tt tokenType) String() string {
switch tt {
case TokAny:
case tokAny:
return "Any token"
case TokString:
case tokString:
return "string"
case TokVarname:
case tokVarname:
return "variable name"
case TokNumber:
case tokNumber:
return "number"
case TokOperator:
case tokOperator:
return "operator"
case TokOpenParen:
case tokOpenParen:
return "open paren"
case TokCloseParen:
case tokCloseParen:
return "close paren"
case TokOpenBracket:
case tokOpenBracket:
return "open bracket"
case TokCloseBracket:
case tokCloseBracket:
return "close bracket"
case TokQName:
case tokQName:
return "QName"
case TokComma:
case tokComma:
return "comma"
}
return "--"
@@ -71,7 +71,7 @@ type token struct {
}

func (tok *token) isNCName() bool {
if tok.Typ != TokQName {
if tok.Typ != tokQName {
return false
}
tokAsString := tok.Value.(string)
@@ -90,15 +90,15 @@ func (toks tokens) String() string {

func (tok token) String() string {
switch tok.Typ {
case TokVarname:
case tokVarname:
return "$" + tok.Value.(string)
case TokOpenParen:
case tokOpenParen:
return "("
case TokCloseParen:
case tokCloseParen:
return ")"
case TokOpenBracket:
case tokOpenBracket:
return "["
case TokCloseBracket:
case tokCloseBracket:
return "]"
}

@@ -319,11 +319,11 @@ func stringToTokenlist(str string) (*tokenlist, error) {
}
if '0' <= r && r <= '9' {
sr.UnreadRune()
tokens = append(tokens, token{getNum(sr), TokNumber})
tokens = append(tokens, token{getNum(sr), tokNumber})
} else if r == '.' {
nextRune, _, err := sr.ReadRune()
if err == io.EOF {
tokens = append(tokens, token{".", TokOperator})
tokens = append(tokens, token{".", tokOperator})
break
}
if err != nil {
@@ -332,24 +332,24 @@ func stringToTokenlist(str string) (*tokenlist, error) {
if '0' <= nextRune && nextRune <= '9' {
sr.UnreadRune()
sr.UnreadRune()
tokens = append(tokens, token{getNum(sr), TokNumber})
tokens = append(tokens, token{getNum(sr), tokNumber})
} else {
sr.UnreadRune()
tokens = append(tokens, token{".", TokOperator})
tokens = append(tokens, token{".", tokOperator})
}
} else if r == '+' || r == '-' || r == '*' || r == '?' || r == '@' || r == '|' || r == '=' {
tokens = append(tokens, token{string(r), TokOperator})
tokens = append(tokens, token{string(r), tokOperator})
} else if r == ',' {
tokens = append(tokens, token{string(r), TokComma})
tokens = append(tokens, token{string(r), tokComma})
} else if r == '>' || r == '<' {
nextRune, _, err := sr.ReadRune()
if err != nil {
return nil, err
}
if nextRune == '=' || nextRune == r {
tokens = append(tokens, token{string(r) + string(nextRune), TokOperator})
tokens = append(tokens, token{string(r) + string(nextRune), tokOperator})
} else {
tokens = append(tokens, token{string(r), TokOperator})
tokens = append(tokens, token{string(r), tokOperator})
sr.UnreadRune()
}
} else if r == '!' {
@@ -358,35 +358,35 @@ func stringToTokenlist(str string) (*tokenlist, error) {
return nil, err
}
if nextRune == '=' {
tokens = append(tokens, token{"!=", TokOperator})
tokens = append(tokens, token{"!=", tokOperator})
} else {
return nil, fmt.Errorf("= expected after !, got %s", string(nextRune))
}
} else if r == '/' || r == ':' {
nextRune, _, err := sr.ReadRune()
if err == io.EOF {
tokens = append(tokens, token{string(r), TokOperator})
tokens = append(tokens, token{string(r), tokOperator})
break
}
if err != nil {
return nil, err
}
if nextRune == r {
tokens = append(tokens, token{string(r) + string(r), TokOperator})
tokens = append(tokens, token{string(r) + string(r), tokOperator})
} else {
tokens = append(tokens, token{string(r), TokOperator})
tokens = append(tokens, token{string(r), tokOperator})
sr.UnreadRune()
}
} else if r == '[' {
tokens = append(tokens, token{r, TokOpenBracket})
tokens = append(tokens, token{r, tokOpenBracket})
} else if r == ']' {
tokens = append(tokens, token{r, TokCloseBracket})
tokens = append(tokens, token{r, tokCloseBracket})
} else if r == '$' {
qname, err := getQName(sr)
if err != nil {
return nil, err
}
tokens = append(tokens, token{qname, TokVarname})
tokens = append(tokens, token{qname, tokVarname})
} else if unicode.IsSpace(r) {
// ignore whitespace
} else if unicode.IsLetter(r) {
Expand All @@ -397,14 +397,14 @@ func stringToTokenlist(str string) (*tokenlist, error) {
}
nextRune, _, err := sr.ReadRune()
if err == io.EOF {
tokens = append(tokens, token{word, TokQName})
tokens = append(tokens, token{word, tokQName})
break
}
if nextRune == ':' {
tokens = append(tokens, token{strings.TrimSuffix(word, ":"), TokDoubleColon})
tokens = append(tokens, token{strings.TrimSuffix(word, ":"), tokDoubleColon})
} else {
sr.UnreadRune()
tokens = append(tokens, token{word, TokQName})
tokens = append(tokens, token{word, tokQName})
}

} else if r == '\'' || r == '"' {
@@ -413,7 +413,7 @@ func stringToTokenlist(str string) (*tokenlist, error) {
if err != nil {
return nil, err
}
tokens = append(tokens, token{str, TokString})
tokens = append(tokens, token{str, tokString})
} else if r == '(' {
nextRune, _, err := sr.ReadRune()
if err == io.EOF {
@@ -432,10 +432,10 @@ func stringToTokenlist(str string) (*tokenlist, error) {
// tokens = append(tokens, token{cmt, TokAny})
} else {
sr.UnreadRune()
tokens = append(tokens, token{r, TokOpenParen})
tokens = append(tokens, token{r, tokOpenParen})
}
} else if r == ')' {
tokens = append(tokens, token{r, TokCloseParen})
tokens = append(tokens, token{r, tokCloseParen})
} else {
return nil, fmt.Errorf("Invalid char for xpath expression %q", string(r))
}
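
The operator handling in stringToTokenlist above relies on a one-rune lookahead: after reading a character such as '>' or '/', it reads the next rune and either folds it into a two-character operator (>=, <<, //, ::) or pushes it back with UnreadRune. The standalone sketch below illustrates that pattern with only the standard library; it is not goxpath's code, and the operator set is reduced to '>', '>=' and '>>' for brevity. Note that strings.Reader.UnreadRune can only step back over the single most recently read rune.

```go
package main

import (
	"fmt"
	"io"
	"strings"
)

// scanOps demonstrates the one-rune lookahead used for multi-character
// operators: after reading '>', peek at the next rune and either fold it
// into ">=" / ">>" or push it back with UnreadRune.
func scanOps(input string) ([]string, error) {
	sr := strings.NewReader(input)
	var ops []string
	for {
		r, _, err := sr.ReadRune()
		if err == io.EOF {
			return ops, nil
		}
		if err != nil {
			return nil, err
		}
		if r != '>' {
			continue // ignore everything else in this reduced sketch
		}
		next, _, err := sr.ReadRune()
		if err == io.EOF {
			ops = append(ops, ">")
			return ops, nil
		}
		if err != nil {
			return nil, err
		}
		if next == '=' || next == '>' {
			ops = append(ops, string(r)+string(next))
		} else {
			ops = append(ops, ">")
			sr.UnreadRune() // put the unrelated rune back for the next iteration
		}
	}
}

func main() {
	ops, err := scanOps("a > b >= c >> d")
	if err != nil {
		panic(err)
	}
	fmt.Println(ops) // [> >= >>]
}
```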
40 changes: 20 additions & 20 deletions xpath/tokenizer_test.go → tokenizer_test.go
@@ -1,4 +1,4 @@
package xpath
package goxpath

import (
"strings"
@@ -10,7 +10,7 @@ func TestNCName(t *testing.T) {
input token
output bool
}{
{token{TokNumber, 1}, false},
{token{tokNumber, 1}, false},
}
for _, td := range testdata {
got := td.input.isNCName()
@@ -73,7 +73,7 @@ func TestGetAxis(t *testing.T) {
input string
output []token
}{
{`child::sub`, []token{{"child", TokDoubleColon}, {"sub", TokQName}}},
{`child::sub`, []token{{"child", tokDoubleColon}, {"sub", tokQName}}},
}
for _, td := range testdata {
toklist, err := stringToTokenlist(td.input)
@@ -99,23 +99,23 @@ func TestOperator(t *testing.T) {
input string
output []token
}{
{`< (:comment (:nested :) :) `, []token{{`<`, TokOperator}}},
{`"hello"`, []token{{"hello", TokString}}},
{`'hello'`, []token{{"hello", TokString}}},
{`< `, []token{{`<`, TokOperator}}},
{`<= `, []token{{`<=`, TokOperator}}},
{`> `, []token{{`>`, TokOperator}}},
{`>= `, []token{{`>=`, TokOperator}}},
{`!= `, []token{{`!=`, TokOperator}}},
{`<< `, []token{{`<<`, TokOperator}}},
{`>> `, []token{{`>>`, TokOperator}}},
{`/ `, []token{{`/`, TokOperator}}},
{`// `, []token{{`//`, TokOperator}}},
{`: `, []token{{`:`, TokOperator}}},
{`:: `, []token{{`::`, TokOperator}}},
{`.`, []token{{`.`, TokOperator}}},
{`(1,2)`, []token{{'(', TokOpenParen}, {1.0, TokNumber}, {`,`, TokComma}, {2.0, TokNumber}, {')', TokCloseParen}}},
{`$hello`, []token{{"hello", TokVarname}}},
{`< (:comment (:nested :) :) `, []token{{`<`, tokOperator}}},
{`"hello"`, []token{{"hello", tokString}}},
{`'hello'`, []token{{"hello", tokString}}},
{`< `, []token{{`<`, tokOperator}}},
{`<= `, []token{{`<=`, tokOperator}}},
{`> `, []token{{`>`, tokOperator}}},
{`>= `, []token{{`>=`, tokOperator}}},
{`!= `, []token{{`!=`, tokOperator}}},
{`<< `, []token{{`<<`, tokOperator}}},
{`>> `, []token{{`>>`, tokOperator}}},
{`/ `, []token{{`/`, tokOperator}}},
{`// `, []token{{`//`, tokOperator}}},
{`: `, []token{{`:`, tokOperator}}},
{`:: `, []token{{`::`, tokOperator}}},
{`.`, []token{{`.`, tokOperator}}},
{`(1,2)`, []token{{'(', tokOpenParen}, {1.0, tokNumber}, {`,`, tokComma}, {2.0, tokNumber}, {')', tokCloseParen}}},
{`$hello`, []token{{"hello", tokVarname}}},
}
for _, td := range testdata {
toklist, err := stringToTokenlist(td.input)
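
The tests above follow Go's table-driven style: a slice of anonymous structs pairs each input with its expected output, and a single loop checks every case. Below is a minimal standalone sketch of that pattern; the function under test (double) is invented for illustration and is not part of goxpath.

```go
package demo

import "testing"

// double is a stand-in function under test, invented for this sketch.
func double(n int) int { return 2 * n }

func TestDouble(t *testing.T) {
	testdata := []struct {
		input  int
		output int
	}{
		{0, 0},
		{3, 6},
		{-2, -4},
	}
	for _, td := range testdata {
		if got := double(td.input); got != td.output {
			t.Errorf("double(%d) = %d, want %d", td.input, got, td.output)
		}
	}
}
```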
(The remaining 2 changed files are not shown above.)
