This commit is contained in:
Alexander Kiryukhin 2022-06-13 04:31:31 +03:00
commit 0562659220
No known key found for this signature in database
GPG key ID: 6DF7A2910D0699E9
19 changed files with 1909 additions and 0 deletions

130
README.md Normal file
View file

@ -0,0 +1,130 @@
# Lexpr - universal expression evaluator
This library can evaluate many types of expressions: math expressions, logic expressions, simple DSLs.
## Installation
`go get go.neonxp.dev/lexpr`
## Usage
```go
ctx := context.Background()
l := lexpr.New(lexpr.WithDefaults())
// Simple math
result1 := <-l.Eval(ctx, `2 + 2 * 2`) // Output channel can return many results
log.Println("Result 1:", result1.Value) // Output: 6
// Helper for exact one result
result2, err := l.OneResult(ctx, `len("test") + 10`)
if err != nil {
log.Fatal(err)
}
log.Println("Result 2:", result2) // Output: 14
// Custom functions
l.SetFunction("add", func(ts *lexpr.TokenStack) error {
a, okA := ts.Pop().Number() // first func argument
b, okB := ts.Pop().Number() // second func argument
if !okA || !okB {
return fmt.Errorf("Both args must be number")
}
ts.Push(lexpr.TokenFromInt(a + b))
return nil
})
result3, err := l.OneResult(ctx, `add(12, 24) * 2`)
if err != nil {
log.Fatal(err)
}
log.Println("Result 3:", result3) // Output: 72
// JSON extraction via dots and variables
jsonString := `{
"rootKey1": "value1",
"rootKey2": {
"childKey1": "value2",
"childKey2": "value3"
},
"arrayKey": [
"array value 1",
"array value 2",
"array value 3",
"array value 4"
]
}`
key1name := "rootKey1"
l.SetVariable("jsonData", jsonString)
l.SetVariable("key1name", key1name)
result41, err := l.OneResult(ctx, `jsonData.key1name`)
if err != nil {
log.Fatal(err)
}
log.Println("Result 4-1:", result41) // Output: "value1"
result42, err := l.OneResult(ctx, `jsonData.rootKey2.childKey2`)
if err != nil {
log.Fatal(err)
}
log.Println("Result 4-2:", result42) // Output: "value3"
result43, err := l.OneResult(ctx, `jsonData.arrayKey.3`)
if err != nil {
log.Fatal(err)
}
log.Println("Result 4-3:", result43) // Output: "array value 4"
// Logic expressions
result51, err := l.OneResult(ctx, `jsonData.key1name == "value1"`)
if err != nil {
log.Fatal(err)
}
log.Println("Result 5-1:", result51) // Output: 1
result52, err := l.OneResult(ctx, `10 >= 5 || 10 <= 5`)
if err != nil {
log.Fatal(err)
}
log.Println("Result 5-2:", result52) // Output: 1
result53, err := l.OneResult(ctx, `10 >= 5 && 10 <= 5`)
if err != nil {
log.Fatal(err)
}
log.Println("Result 5-3:", result53) // Output: 0
```
## Default operators
|Operator|Description|Example|
|:------:|:---------:|:-----:|
||JSON operators||
|`.`|Extract field from json|`jsonData.key1.0.key2`|
||Math operators||
|`**`|Power number|`3 ** 3` = 27|
|`*`|Multiply numbers|`2 * 4` = 8|
|`/`|Divide number|`6 / 3` = 2|
|`%`|Remainder of division|`5 % 3` = 2|
|`+`|Sum|`2 + 2` = 4|
|`-`|Subtract|`6 - 2` = 4|
||Logic operators||
|`!`|Logic not|`!1` = 0|
|`>`|More|`3 > 2` = 1|
|`>=`|More or equal|`3 >= 3` = 1|
|`<`|Less|`3 < 2` = 0|
|`<=`|Less or equal|`3 <= 3` = 1|
|`==`|Equal|`1==1` = 1|
|`!=`|Not equal|`1!=1` = 0|
|`&&`|Logic and|`3 > 0 && 1 > 0` = 1|
|`||`|Logic or|`1 > 0 || 1 == 1` = 1|
## Default functions
|Function|Description|Example|
|:------:|:---------:|:-----:|
|max|returns max of two values|`max(1,2)` = 2|
|min|returns min of two values|`min(1,2)` = 1|
|len|returns length of string|`len("test")` = 4|
|atoi|converts string to number|`atoi("123")` = 123|
|itoa|converts number to string|`itoa(123)` = "123"|
## Contribution
PRs are welcome.

91
example/main.go Normal file
View file

@ -0,0 +1,91 @@
package main
import (
"context"
"fmt"
"log"
"go.neonxp.dev/lexpr"
)
// main demonstrates the lexpr API: simple math, the OneResult helper,
// custom functions, JSON extraction via the dot operator, variables and
// logic expressions.
func main() {
	ctx := context.Background()
	l := lexpr.New(lexpr.WithDefaults())
	// Simple math. Eval returns a channel because evaluation may yield
	// several results; here we take the first.
	result1 := <-l.Eval(ctx, `2 + 2 * 2`)
	log.Println("Result 1:", result1.Value)
	// Helper for one result
	result2, err := l.OneResult(ctx, `len("test") + 10`)
	if err != nil {
		log.Fatal(err)
	}
	log.Println("Result 2:", result2)
	// Custom functions: handlers pop their arguments from the stack and
	// push one result back.
	l.SetFunction("add", func(ts *lexpr.TokenStack) error {
		a, okA := ts.Pop().Number() // first func argument
		b, okB := ts.Pop().Number() // second func argument
		if !okA || !okB {
			return fmt.Errorf("Both args must be number")
		}
		ts.Push(lexpr.TokenFromInt(a + b))
		return nil
	})
	result3, err := l.OneResult(ctx, `add(12, 24) * 2`)
	if err != nil {
		log.Fatal(err)
	}
	log.Println("Result 3:", result3)
	// JSON extraction via dots and variables
	jsonString := `{
		"rootKey1": "value1",
		"rootKey2": {
			"childKey1": "value2",
			"childKey2": "value3"
		},
		"arrayKey": [
			"array value 1",
			"array value 2",
			"array value 3",
			"array value 4"
		]
	}`
	key1name := "rootKey1"
	l.SetVariable("jsonData", jsonString)
	l.SetVariable("key1name", key1name)
	// key1name resolves to "rootKey1" first, then indexes into the JSON.
	result41, err := l.OneResult(ctx, `jsonData.key1name`) // = value1
	if err != nil {
		log.Fatal(err)
	}
	log.Println("Result 4-1:", result41)
	result42, err := l.OneResult(ctx, `jsonData.rootKey2.childKey2`) // = value3
	if err != nil {
		log.Fatal(err)
	}
	log.Println("Result 4-2:", result42)
	// Numeric path segments index into JSON arrays (zero-based).
	result43, err := l.OneResult(ctx, `jsonData.arrayKey.3`) // = array value 4
	if err != nil {
		log.Fatal(err)
	}
	log.Println("Result 4-3:", result43)
	// Logic expressions evaluate to 1 (true) or 0 (false).
	result51, err := l.OneResult(ctx, `jsonData.key1name == "value1"`) // = 1
	if err != nil {
		log.Fatal(err)
	}
	log.Println("Result 5-1:", result51)
	result52, err := l.OneResult(ctx, `10 >= 5 || 10 <= 5`) // = 1
	if err != nil {
		log.Fatal(err)
	}
	log.Println("Result 5-2:", result52)
	result53, err := l.OneResult(ctx, `10 >= 5 && 10 <= 5`) // = 0
	if err != nil {
		log.Fatal(err)
	}
	log.Println("Result 5-3:", result53)
}

75
executor.go Normal file
View file

@ -0,0 +1,75 @@
package lexpr
import (
"context"
"fmt"
"strings"
)
// execute evaluates a stream of RPN-ordered tokens and emits the final
// value(s) on the returned channel.
//
// Numbers and strings are pushed onto a stack; operator and function tokens
// pop their arguments from it and push their result back. Whatever remains
// on the stack when the input closes is emitted as Result values. The output
// channel is closed when evaluation finishes, fails, or ctx is done.
//
// Fixes: unknown funct/op names previously caused a nil-function call
// (panic); the tokError branch passed a non-constant format string to
// fmt.Errorf (flagged by go vet).
func (l *Lexpr) execute(ctx context.Context, tokens <-chan Token) chan Result {
	out := make(chan Result)
	stack := TokenStack{}
	go func() {
		defer func() {
			// Drain the stack: every leftover string/number token is a result.
			for len(stack) > 0 {
				ret := stack.Pop()
				switch ret.typ {
				case str:
					out <- Result{Value: ret.value}
				case number:
					out <- Result{Value: ret.ivalue}
				}
			}
			close(out)
		}()
		for {
			select {
			case <-ctx.Done():
				return
			case tkn, ok := <-tokens:
				if !ok {
					return
				}
				switch tkn.typ {
				case number:
					stack.Push(tkn)
				case str:
					// Strip the surrounding quotes kept by the lexer.
					stack.Push(Token{
						typ:   str,
						value: strings.Trim(tkn.value, `"`),
					})
				case funct:
					fn, ok := l.functions[tkn.value]
					if !ok {
						out <- Result{Error: fmt.Errorf("unknown function: %s", tkn.value)}
						return
					}
					if err := fn(&stack); err != nil {
						out <- Result{Error: err}
						return
					}
				case op:
					o, ok := l.operators[tkn.value]
					if !ok {
						out <- Result{Error: fmt.Errorf("unknown operator: %s", tkn.value)}
						return
					}
					if err := o.handler(&stack); err != nil {
						out <- Result{Error: err}
						return
					}
				case word:
					// Bare words resolve to variables (case-insensitive).
					// Unknown words are pushed as-is so operators like "."
					// can use them as JSON keys.
					variable, hasVariable := l.variables[strings.ToLower(tkn.value)]
					if !hasVariable {
						stack.Push(tkn)
						continue
					}
					vtkn, ok := TokenFromAny(variable)
					if !ok {
						out <- Result{Error: fmt.Errorf("invalid variable value: %+v", variable)}
						return
					}
					stack.Push(vtkn)
				case tokError:
					out <- Result{Error: fmt.Errorf("%s", tkn.value)}
					return
				}
			}
		}
	}()
	return out
}

3
go.mod Normal file
View file

@ -0,0 +1,3 @@
module go.neonxp.dev/lexpr
go 1.18

12
helpers.go Normal file
View file

@ -0,0 +1,12 @@
package lexpr
import "context"
// OneResult evaluates expression and returns exactly the first result value.
//
// If the context is cancelled before a result arrives, the context's error
// is returned (previously cancellation returned nil, nil, which was
// indistinguishable from a successful nil result).
func (l *Lexpr) OneResult(ctx context.Context, expression string) (any, error) {
	select {
	case r := <-l.Eval(ctx, expression):
		return r.Value, r.Error
	case <-ctx.Done():
		return nil, ctx.Err()
	}
}

78
itr.go Normal file
View file

@ -0,0 +1,78 @@
package lexpr
import "context"
// infixToRpn reorders an infix token stream into reverse polish notation
// using the shunting-yard algorithm. Values pass straight through;
// operators, functions and parentheses are buffered on a stack.
//
// Fix: the operator case previously popped ANY stack head that was not an
// op token — including "(" and function tokens — which corrupted any
// parenthesised sub-expression containing an operator (e.g. `min(3, 2*2)`).
// It now pops only operators of equal or higher priority.
func infixToRpn(ctx context.Context, tokens <-chan Token) <-chan Token {
	out := make(chan Token)
	stack := TokenStack{}
	go func() {
		defer func() {
			// Flush remaining operators; a leftover "(" means unbalanced input.
			for len(stack) > 0 {
				if stack.Head().typ == lp {
					out <- Token{
						typ:   tokError,
						value: "invalid brakets",
					}
					break
				}
				out <- stack.Pop()
			}
			close(out)
		}()
		for {
			select {
			case <-ctx.Done():
				return
			case tkn, ok := <-tokens:
				if !ok {
					return
				}
				switch tkn.typ {
				case number, word, str, tokError:
					out <- tkn
				case funct:
					stack.Push(tkn)
				case sep:
					// Pop operators until the opening "(" of the call.
					for len(stack) > 0 && stack.Head().typ != lp {
						out <- stack.Pop()
					}
					if len(stack) == 0 {
						out <- Token{
							typ:   tokError,
							value: "no arg separator or opening braket",
						}
						return
					}
				case op:
					// NOTE(review): leftAssoc is not consulted here, so all
					// operators currently behave left-associatively (>= keeps
					// the original semantics) — confirm intended behavior
					// for "**".
					for len(stack) > 0 && stack.Head().typ == op && stack.Head().priority >= tkn.priority {
						out <- stack.Pop()
					}
					stack.Push(tkn)
				case lp:
					stack.Push(tkn)
				case rp:
					for len(stack) > 0 && stack.Head().typ != lp {
						out <- stack.Pop()
					}
					if len(stack) == 0 {
						out <- Token{
							typ:   tokError,
							value: "no opening braket",
						}
						return
					}
					stack.Pop() // discard the "("
					// A function name directly before "(" is emitted now.
					if len(stack) > 0 && stack.Head().typ == funct {
						out <- stack.Pop()
					}
				}
			}
		}
	}()
	return out
}

149
itr_test.go Normal file
View file

@ -0,0 +1,149 @@
package lexpr
import (
"context"
"reflect"
"testing"
)
// Test_infixToRpn feeds a hand-built infix token stream for
// `min(3, 2) * max(10, 20) == 40` through the shunting-yard converter and
// compares the complete RPN output: 3 2 min 10 20 max * 40 ==.
func Test_infixToRpn(t *testing.T) {
	type args struct {
		in []Token
	}
	tests := []struct {
		name string
		args args
		want []Token
	}{
		{
			name: "simple",
			args: args{
				in: []Token{
					{
						typ:   funct,
						value: "min",
					},
					{
						typ: lp,
					},
					{
						typ:    number,
						ivalue: 3,
					},
					{
						typ: sep,
					},
					{
						typ:    number,
						ivalue: 2,
					},
					{
						typ: rp,
					},
					{
						typ:       op,
						value:     "*",
						ivalue:    0,
						priority:  120,
						leftAssoc: false,
					},
					{
						typ:   funct,
						value: "max",
					},
					{
						typ: lp,
					},
					{
						typ:    number,
						ivalue: 10,
					},
					{
						typ: sep,
					},
					{
						typ:    number,
						ivalue: 20,
					},
					{
						typ: rp,
					},
					{
						typ:       op,
						value:     "==",
						ivalue:    0,
						priority:  20,
						leftAssoc: false,
					},
					{
						typ:    number,
						ivalue: 40,
					},
				},
			},
			// Expected RPN ordering of the same tokens.
			want: []Token{
				{
					typ:    number,
					ivalue: 3,
				},
				{
					typ:    number,
					ivalue: 2,
				},
				{
					typ:   funct,
					value: "min",
				},
				{
					typ:    number,
					ivalue: 10,
				},
				{
					typ:    number,
					ivalue: 20,
				},
				{
					typ:   funct,
					value: "max",
				},
				{
					typ:       op,
					value:     "*",
					ivalue:    0,
					priority:  120,
					leftAssoc: false,
				},
				{
					typ:    number,
					ivalue: 40,
				},
				{
					typ:       op,
					value:     "==",
					ivalue:    0,
					priority:  20,
					leftAssoc: false,
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Feed the fixture tokens through a channel, as Eval would.
			inCh := make(chan Token)
			go func() {
				defer close(inCh)
				for _, tk := range tt.args.in {
					inCh <- tk
				}
			}()
			gotCh := infixToRpn(context.Background(), inCh)
			got := []Token{}
			for o := range gotCh {
				got = append(got, o)
			}
			if !reflect.DeepEqual(got, tt.want) {
				t.Errorf("infixToRpn() = %v, want %v", got, tt.want)
			}
		})
	}
}

181
lex.go Normal file
View file

@ -0,0 +1,181 @@
package lexpr
import (
"context"
"strings"
"unicode/utf8"
)
// EOF is the sentinel rune returned by next/peek once the input is exhausted.
const EOF rune = -1

// lex holds current scanner state.
type lex struct {
	input  string     // Input string.
	start  int        // Start position of current lexem.
	pos    int        // Pos at input string.
	output chan lexem // Lexems channel.
	width  int        // Width of last rune in bytes (used by back).
}
// newLex returns a new scanner; parse() supplies the input and output
// channel, so all fields start at their zero values.
func newLex() *lex {
	return &lex{}
}
// parse scans input and streams lexems on the returned channel until the
// input is exhausted, an unrecognised character is found, or ctx is
// cancelled. The channel is closed when scanning stops.
func (l *lex) parse(ctx context.Context, input string) <-chan lexem {
	l.input = input
	l.output = make(chan lexem)
	go func() {
		defer close(l.output)
		for {
			if ctx.Err() != nil {
				return
			}
			// Case order is load-bearing: whitespace is skipped first,
			// numbers are tried before operator runs, operators before
			// words, and words before quoted strings.
			switch {
			case l.acceptWhile(" \n\t", false):
				l.ignore() // drop whitespace, it is not emitted
			case l.accept("("):
				l.emit(lp)
			case l.accept(")"):
				l.emit(rp)
			case l.accept(","):
				l.emit(sep)
			case scanNumber(l):
				l.emit(number)
			case scanOps(l):
				l.emit(op)
			case scanWord(l):
				l.emit(word)
			case scanQuotedString(l, `"`):
				l.emit(str)
			case l.peek() == EOF:
				return
			default:
				// Nothing matched: emit an error lexem and stop scanning.
				l.emit(tokError)
				return
			}
		}
	}()
	return l.output
}
// emit sends the lexem accumulated since start with the given type, then
// begins a new lexem at the current position.
func (l *lex) emit(typ lexType) {
	lx := lexem{
		Type:  typ,
		Value: l.input[l.start:l.pos],
		Start: l.start,
		End:   l.pos,
	}
	l.output <- lx
	l.start = l.pos
}
// next consumes and returns the rune at the current position, recording its
// byte width so back() can undo the move. Returns EOF at end of input.
func (l *lex) next() rune {
	if l.pos >= len(l.input) {
		l.width = 0
		return EOF
	}
	r, size := utf8.DecodeRuneInString(l.input[l.pos:])
	l.width = size
	l.pos += size
	return r
}
// back moves the position to the previous rune.
// Only valid once per next(): width is a single value, not a stack.
func (l *lex) back() {
	l.pos -= l.width
}

// ignore discards the pending lexem (e.g. skipped whitespace) by moving
// start up to the current position.
func (l *lex) ignore() {
	l.start = l.pos
	l.width = 0
}

// peek returns the rune at the current position without consuming it.
func (l *lex) peek() (r rune) {
	r = l.next()
	l.back()
	return r
}
// accept consumes the next rune when it occurs in valid; otherwise the
// position is left unchanged. Reports whether a rune was consumed.
func (l *lex) accept(valid string) bool {
	r := l.next()
	if strings.ContainsRune(valid, r) {
		return true
	}
	l.back()
	return false
}
// acceptString reports whether s occurs at the current position and, if so,
// consumes it.
//
// Fix: the previous implementation matched s against the beginning of the
// WHOLE input rather than at l.pos, contradicting its own documentation.
func (l *lex) acceptString(s string, caseInsentive bool) bool {
	rest := l.input[l.pos:]
	if caseInsentive {
		rest = strings.ToLower(rest)
		s = strings.ToLower(s)
	}
	if strings.HasPrefix(rest, s) {
		// NOTE(review): advancing by len(s) assumes ToLower preserves byte
		// length — true for ASCII; confirm if non-ASCII tokens are added.
		l.width = 0
		l.pos += len(s)
		return true
	}
	return false
}
// acceptAnyOf reports whether any of the given substrings occurs at the
// current position; the first match is consumed.
func (l *lex) acceptAnyOf(s []string, caseInsentive bool) bool {
	for _, candidate := range s {
		if !l.acceptString(candidate, caseInsentive) {
			continue
		}
		return true
	}
	return false
}
// acceptWhile consumes runes while they occur in valid. Reports whether at
// least one rune was consumed.
//
// Fix: on EOF the previous version returned false even after consuming
// runes (the position had already advanced), so a trailing match at the end
// of input was mis-reported to the caller.
func (l *lex) acceptWhile(valid string, ignoreEscaped bool) bool {
	start := l.pos
	for {
		ch := l.next()
		switch {
		case ch == EOF:
			return l.pos > start
		case ch == '\\' && ignoreEscaped:
			l.next() // skip the escaped rune
		case !strings.ContainsRune(valid, ch):
			l.back()
			return l.pos > start
		}
	}
}
// acceptWhileNot consumes runes while they do NOT occur in invalid.
// Reports whether at least one rune was consumed.
//
// Fix: mirrors acceptWhile — on EOF it now reports whether anything was
// consumed instead of always returning false.
func (l *lex) acceptWhileNot(invalid string, ignoreEscaped bool) bool {
	start := l.pos
	for {
		ch := l.next()
		switch {
		case ch == EOF:
			return l.pos > start
		case ch == '\\' && ignoreEscaped:
			l.next() // skip the escaped rune
		case strings.ContainsRune(invalid, ch):
			l.back()
			return l.pos > start
		}
	}
}
// atStart reports whether the current lexem is still EMPTY, i.e. nothing
// has been consumed since the last emit/ignore. (The previous comment
// stated the opposite of what the code does.)
func (l *lex) atStart() bool {
	return l.pos == l.start
}

89
lex_test.go Normal file
View file

@ -0,0 +1,89 @@
package lexpr
import (
"context"
"reflect"
"testing"
)
// Test_lex_Parse checks that the scanner splits an expression into the
// expected sequence of lexem types and values. Start/End offsets are
// deliberately not compared.
func Test_lex_Parse(t *testing.T) {
	type args struct {
		input string
	}
	tests := []struct {
		name string
		args args
		want []lexem
	}{
		{
			name: "math",
			args: args{
				input: "min(3, 2) * max(10, 20) == 40",
			},
			// Note: "min"/"max" are plain words at this stage; the
			// tokenizer later classifies them as functions.
			want: []lexem{
				{
					Type:  word,
					Value: "min",
				}, {
					Type:  lp,
					Value: "(",
				}, {
					Type:  number,
					Value: "3",
				}, {
					Type:  sep,
					Value: ",",
				}, {
					Type:  number,
					Value: "2",
				}, {
					Type:  rp,
					Value: ")",
				}, {
					Type:  op,
					Value: "*",
				}, {
					Type:  word,
					Value: "max",
				}, {
					Type:  lp,
					Value: "(",
				}, {
					Type:  number,
					Value: "10",
				}, {
					Type:  sep,
					Value: ",",
				}, {
					Type:  number,
					Value: "20",
				}, {
					Type:  rp,
					Value: ")",
				}, {
					Type:  op,
					Value: "==",
				}, {
					Type:  number,
					Value: "40",
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			l := newLex()
			gotCh := l.parse(context.Background(), tt.args.input)
			got := []lexem{}
			for o := range gotCh {
				// Copy only Type and Value so positions don't affect the
				// DeepEqual comparison.
				got = append(got, lexem{
					Type:  o.Type,
					Value: o.Value,
				})
			}
			if !reflect.DeepEqual(got, tt.want) {
				t.Errorf("lex.Parse() = %v, want %v", got, tt.want)
			}
		})
	}
}

26
lexem.go Normal file
View file

@ -0,0 +1,26 @@
package lexpr
// lexem represents part of parsed string.
type lexem struct {
	Type  lexType // Type of Lexem.
	Value string  // Value of Lexem.
	Start int     // Start position at input string.
	End   int     // End position at input string.
}

// lexType represents type of current lexem.
type lexType int

// Lexem/token types shared by the scanner, tokenizer and executor.
const (
	lexEOF   lexType = iota // end of input; also the zero Token type
	tokError                // scan/convert error; Value holds the message
	number                  // numeric literal
	str                     // quoted string literal
	word                    // identifier: variable, function or word operator
	op                      // operator symbol
	funct                   // function call (classified by the tokenizer)
	lp                      // "("
	rp                      // ")"
	sep                     // "," argument separator
)

52
lexpr.go Normal file
View file

@ -0,0 +1,52 @@
package lexpr
import (
"context"
"strings"
)
// Lexpr is an expression evaluator configured with operators, functions and
// variables. Configure it with Opt options passed to New and/or the Set*
// methods.
type Lexpr struct {
	operators map[string]Operator
	functions map[string]func(ts *TokenStack) error
	variables map[string]any
}
// New returns an evaluator with the given options applied.
//
// The maps are pre-initialised so that SetFunction, SetOperator and
// SetVariable work even when no option such as WithDefaults was supplied;
// previously they panicked writing to a nil map.
func New(opts ...Opt) *Lexpr {
	l := &Lexpr{
		operators: map[string]Operator{},
		functions: map[string]func(ts *TokenStack) error{},
		variables: map[string]any{},
	}
	for _, o := range opts {
		o(l)
	}
	return l
}
// Eval parses and evaluates expression, streaming results on the returned
// channel. The pipeline is: lexer -> tokenizer -> shunting-yard -> executor.
func (l *Lexpr) Eval(ctx context.Context, expression string) chan Result {
	lexems := newLex().parse(ctx, expression)
	return l.execute(ctx, infixToRpn(ctx, l.tokenize(ctx, lexems)))
}
// SetFunction registers a stack handler under a case-insensitive name and
// returns l for chaining.
func (l *Lexpr) SetFunction(name string, fn func(ts *TokenStack) error) *Lexpr {
	l.functions[strings.ToLower(name)] = fn
	return l
}
// SetOperator registers an operator handler together with its priority and
// associativity under a case-insensitive name; returns l for chaining.
func (l *Lexpr) SetOperator(name string, fn func(ts *TokenStack) error, priority int, leftAssoc bool) *Lexpr {
	o := Operator{
		handler:   fn,
		priority:  priority,
		leftAssoc: leftAssoc,
	}
	l.operators[strings.ToLower(name)] = o
	return l
}
// SetVariable binds a value to a case-insensitive variable name and returns
// l for chaining.
func (l *Lexpr) SetVariable(name string, value any) *Lexpr {
	l.variables[strings.ToLower(name)] = value
	return l
}

// Result is a single evaluation outcome: either Value or Error is set.
type Result struct {
	Value any
	Error error
}

176
lexpr_test.go Normal file
View file

@ -0,0 +1,176 @@
package lexpr
import (
"context"
"reflect"
"testing"
)
// TestLexpr_Eval runs end-to-end expressions through the full pipeline
// (lexer, tokenizer, shunting-yard, executor) and checks the first result
// value, or that an error is reported for malformed input.
func TestLexpr_Eval(t *testing.T) {
	type fields struct {
		operators map[string]Operator
		functions map[string]func(ts *TokenStack) error
		variables map[string]any
	}
	type args struct {
		expression string
	}
	tests := []struct {
		name    string
		fields  fields
		args    args
		want    any
		wantErr bool
	}{
		{
			name: "simple math",
			fields: fields{
				operators: Operators,
				functions: Functions,
				variables: map[string]any{},
			},
			args:    args{expression: "2 + 2 * 2"},
			want:    6,
			wantErr: false,
		},
		{
			name: "complex equal",
			fields: fields{
				operators: Operators,
				functions: Functions,
				variables: map[string]any{},
			},
			args:    args{expression: "min(3, 2) * max(10, 20) == 40"},
			want:    1,
			wantErr: false,
		},
		{
			name: "complex neql",
			fields: fields{
				operators: Operators,
				functions: Functions,
				variables: map[string]any{},
			},
			args:    args{expression: "min(3, 2) * max(10, 20) != 40"},
			want:    0,
			wantErr: false,
		},
		{
			// Variables of mixed types; floats are truncated to int.
			name: "variables",
			fields: fields{
				operators: Operators,
				functions: Functions,
				variables: map[string]any{
					"svar": "test",
					"ivar": int(123),
					"fvar": 321.0,
				},
			},
			args: args{
				expression: "len(svar) + ivar + fvar",
			},
			want:    448,
			wantErr: false,
		},
		{
			name: "invalid1",
			fields: fields{
				operators: Operators,
				functions: Functions,
				variables: map[string]any{},
			},
			args:    args{expression: ")("},
			want:    nil,
			wantErr: true,
		},
		{
			name: "invalid2",
			fields: fields{
				operators: Operators,
				functions: Functions,
				variables: map[string]any{},
			},
			args:    args{expression: "var1 + var2"},
			want:    nil,
			wantErr: true,
		},
		{
			name: "invalid3",
			fields: fields{
				operators: Operators,
				functions: Functions,
				variables: map[string]any{},
			},
			args:    args{expression: "3 @ 4"},
			want:    nil,
			wantErr: true,
		},
		{
			name: "dot notation",
			fields: fields{
				operators: Operators,
				functions: Functions,
				variables: map[string]any{
					"j": `{ "one" : { "four": {"five": "six"} }, "two": "three" }`,
				},
			},
			args: args{
				expression: `j.one.four.five`,
			},
			want:    `six`,
			wantErr: false,
		},
		{
			name: "dot notation with arrays",
			fields: fields{
				operators: Operators,
				functions: Functions,
				variables: map[string]any{
					"j": `{ "one" : { "four": ["five", "six", "seven"] }, "two": "three" }`,
				},
			},
			args: args{
				expression: `j.one.four.1`,
			},
			want:    `six`,
			wantErr: false,
		},
		{
			// Path segments that are variables resolve before indexing.
			name: "dot notation with arrays and variables",
			fields: fields{
				operators: Operators,
				functions: Functions,
				variables: map[string]any{
					"j":    `{ "one" : { "four": ["five", "six", "seven"] }, "two": "three" }`,
					"key1": "one",
					"key2": 1,
				},
			},
			args: args{
				expression: `j.key1.four.key2`,
			},
			want:    `six`,
			wantErr: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			l := &Lexpr{
				operators: tt.fields.operators,
				functions: tt.fields.functions,
				variables: tt.fields.variables,
			}
			gotCh := l.Eval(context.Background(), tt.args.expression)
			// Only the first result is checked; Eval may emit several.
			res := <-gotCh
			got := res.Value
			err := res.Error
			if (err != nil) != tt.wantErr {
				t.Errorf("Lexpr.Eval() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if !reflect.DeepEqual(got, tt.want) {
				t.Errorf("Lexpr.Eval() = %v, want %v", got, tt.want)
			}
		})
	}
}

29
options.go Normal file
View file

@ -0,0 +1,29 @@
package lexpr
// Opt configures a Lexpr created by New.
type Opt func(*Lexpr)

// WithOperators replaces the evaluator's operator table.
func WithOperators(operators map[string]Operator) Opt {
	return func(l *Lexpr) {
		l.operators = operators
	}
}

// WithFunctions replaces the evaluator's function table.
func WithFunctions(functions map[string]func(ts *TokenStack) error) Opt {
	return func(l *Lexpr) {
		l.functions = functions
	}
}

// WithValues sets the initial variable map. (Despite the name, it populates
// the same map used by SetVariable.)
func WithValues(variables map[string]any) Opt {
	return func(l *Lexpr) {
		l.variables = variables
	}
}

// WithDefaults installs the standard operators and functions and an empty
// variable map.
func WithDefaults() Opt {
	return func(l *Lexpr) {
		l.operators = Operators
		l.functions = Functions
		l.variables = map[string]any{}
	}
}

50
scanners.go Normal file
View file

@ -0,0 +1,50 @@
package lexpr
import (
"strings"
)
// Character classes used by the scanners.
const (
	digits = "0123456789"
	alpha  = "qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM"
	chars  = "+-*/=<>@&|:!." // runes that may appear in an operator run
)

// scanNumber simplest scanner that accepts decimal int and float.
func scanNumber(l *lex) bool {
	l.acceptWhile(digits, false)
	if l.atStart() {
		// not found any digit
		return false
	}
	// Optional fractional part; a trailing "." as in "12." is accepted too.
	l.accept(".")
	l.acceptWhile(digits, false)
	return !l.atStart()
}

// scanWord reports whether the input at the current position starts with a
// letter; when it does, the full alphanumeric identifier is consumed
// (letters first, then letters or digits).
func scanWord(l *lex) bool {
	if !l.accept(alpha) {
		return false
	}
	l.acceptWhile(alpha+digits, false)
	return true
}

// scanOps consumes a run of operator characters (see chars above).
func scanOps(l *lex) bool {
	return l.acceptWhile(chars, false)
}
// scanQuotedString reports whether a quoted string starts at the current
// position and consumes it, including both quotes. Works with any quote
// characters given in quote; escaped quotes inside the body are skipped.
//
// Fixes: the empty string `""` previously left the closing quote
// unconsumed, and an unterminated string was silently accepted; it is now
// rejected (the position is restored so the lexer emits an error).
func scanQuotedString(l *lex, quote string) bool {
	start := l.pos
	if !strings.ContainsRune(quote, l.next()) {
		l.pos = start
		return false
	}
	// Consume the (possibly empty) body up to an unescaped quote.
	l.acceptWhileNot(quote, true)
	if !strings.ContainsRune(quote, l.next()) {
		// No closing quote before EOF: not a valid string literal.
		l.pos = start
		return false
	}
	return true
}

23
stack.go Normal file
View file

@ -0,0 +1,23 @@
package lexpr
// TokenStack is a LIFO stack of tokens used by the shunting-yard converter
// and the executor; the last element is the top.
type TokenStack []Token

// Push places item on top of the stack.
func (s *TokenStack) Push(item Token) {
	*s = append(*s, item)
}
// Pop removes and returns the top token; an empty stack yields the zero
// Token.
func (s *TokenStack) Pop() (item Token) {
	n := len(*s)
	if n == 0 {
		return
	}
	item = (*s)[n-1]
	*s = (*s)[:n-1]
	return item
}
// Head returns the top token without removing it; an empty stack yields the
// zero Token.
func (s *TokenStack) Head() (item Token) {
	if n := len(*s); n > 0 {
		item = (*s)[n-1]
	}
	return item
}

415
std.go Normal file
View file

@ -0,0 +1,415 @@
package lexpr
import (
"encoding/json"
"fmt"
"math"
"strconv"
"strings"
)
// Operator couples a stack handler with parsing metadata.
type Operator struct {
	handler   func(ts *TokenStack) error // pops operands, pushes the result
	priority  int                        // binding strength for the shunting-yard converter
	leftAssoc bool                       // associativity flag (not currently consulted by the converter)
}
// Operators is the default operator set.
//
// Operands arrive in RPN order: the FIRST Pop returns the RIGHT operand and
// the SECOND Pop the LEFT one. This revision fixes "-", "/", "%" and "**",
// which previously applied their operands swapped (e.g. `6 - 2` evaluated
// to -4 and `6 / 3` to 0, contradicting the documented results); fixes "!"
// (it popped two tokens and used bitwise complement instead of logical
// negation, so `!1` was -2); and guards integer division/modulo by zero,
// which panicked.
var Operators = map[string]Operator{
	// JSON extraction: left operand is a JSON document string, right operand
	// a key (string/word) or an array index (number).
	".": {
		handler: func(ts *TokenStack) error {
			t2 := ts.Pop() // key or index
			t1 := ts.Pop() // JSON document
			switch t2.typ {
			case str, word:
				m := map[string]json.RawMessage{}
				if err := json.Unmarshal([]byte(t1.value), &m); err != nil {
					return fmt.Errorf("invalid json %s err: %s", t1.value, err.Error())
				}
				val, ok := m[t2.value]
				if !ok {
					return fmt.Errorf("invalid json key %s key: %s", t1.value, t2.value)
				}
				ts.Push(Token{
					typ:   str,
					value: strings.Trim(string(val), `"`),
				})
			case number:
				m := []json.RawMessage{}
				if err := json.Unmarshal([]byte(t1.value), &m); err != nil {
					return fmt.Errorf("invalid json %s err: %s", t1.value, err.Error())
				}
				if len(m) <= t2.ivalue {
					return fmt.Errorf("invalid json key %s key: %s", t1.value, t2.value)
				}
				val := m[t2.ivalue]
				ts.Push(Token{
					typ:   str,
					value: strings.Trim(string(val), `"`),
				})
			default:
				return fmt.Errorf("invalid json key: %+v", t2)
			}
			return nil
		},
		priority:  140,
		leftAssoc: false,
	},
	// Math operators
	"**": {
		handler: func(ts *TokenStack) error {
			t1 := ts.Pop() // exponent (right operand)
			t2 := ts.Pop() // base (left operand)
			if t1.typ != number || t2.typ != number {
				return fmt.Errorf("Both arguments must be number, got op1 = %+v, op2 = %+v", t1, t2)
			}
			ts.Push(Token{
				typ:    number,
				ivalue: int(math.Pow(float64(t2.ivalue), float64(t1.ivalue))),
			})
			return nil
		},
		priority:  130,
		leftAssoc: true,
	},
	"*": {
		handler: func(ts *TokenStack) error {
			t1 := ts.Pop()
			t2 := ts.Pop()
			if t1.typ != number || t2.typ != number {
				return fmt.Errorf("Both arguments must be number, got op1 = %+v, op2 = %+v", t1, t2)
			}
			ts.Push(Token{
				typ:    number,
				ivalue: t1.ivalue * t2.ivalue,
			})
			return nil
		},
		priority:  120,
		leftAssoc: false,
	},
	"/": {
		handler: func(ts *TokenStack) error {
			t1 := ts.Pop() // divisor (right operand)
			t2 := ts.Pop() // dividend (left operand)
			if t1.typ != number || t2.typ != number {
				return fmt.Errorf("Both arguments must be number, got op1 = %+v, op2 = %+v", t1, t2)
			}
			if t1.ivalue == 0 {
				return fmt.Errorf("division by zero")
			}
			ts.Push(Token{
				typ:    number,
				ivalue: t2.ivalue / t1.ivalue,
			})
			return nil
		},
		priority:  120,
		leftAssoc: false,
	},
	"%": {
		handler: func(ts *TokenStack) error {
			t1 := ts.Pop() // divisor (right operand)
			t2 := ts.Pop() // dividend (left operand)
			if t1.typ != number || t2.typ != number {
				return fmt.Errorf("Both arguments must be number, got op1 = %+v, op2 = %+v", t1, t2)
			}
			if t1.ivalue == 0 {
				return fmt.Errorf("modulo by zero")
			}
			ts.Push(Token{
				typ:    number,
				ivalue: t2.ivalue % t1.ivalue,
			})
			return nil
		},
		priority:  120,
		leftAssoc: false,
	},
	"+": {
		handler: func(ts *TokenStack) error {
			t1 := ts.Pop()
			t2 := ts.Pop()
			if t1.typ != number || t2.typ != number {
				return fmt.Errorf("Both arguments must be number, got op1 = %+v, op2 = %+v", t1, t2)
			}
			ts.Push(Token{
				typ:    number,
				ivalue: t1.ivalue + t2.ivalue,
			})
			return nil
		},
		priority:  110,
		leftAssoc: false,
	},
	"-": {
		handler: func(ts *TokenStack) error {
			t1 := ts.Pop() // subtrahend (right operand)
			t2 := ts.Pop() // minuend (left operand)
			if t1.typ != number || t2.typ != number {
				return fmt.Errorf("Both arguments must be number, got op1 = %+v, op2 = %+v", t1, t2)
			}
			ts.Push(Token{
				typ:    number,
				ivalue: t2.ivalue - t1.ivalue,
			})
			return nil
		},
		priority:  110,
		leftAssoc: false,
	},
	// Logic operators
	"!": {
		handler: func(ts *TokenStack) error {
			t := ts.Pop()
			if t.typ != number {
				return fmt.Errorf("Argument must be number, got %+v", t)
			}
			// Logical negation: 0 -> 1, anything else -> 0 (so `!1` = 0).
			r := 0
			if t.ivalue == 0 {
				r = 1
			}
			ts.Push(Token{
				typ:    number,
				ivalue: r,
			})
			return nil
		},
		priority:  50,
		leftAssoc: false,
	},
	">": {
		handler: func(ts *TokenStack) error {
			t1 := ts.Pop() // right operand
			t2 := ts.Pop() // left operand
			if t1.typ != number || t2.typ != number {
				return fmt.Errorf("Both arguments must be number, got op1 = %+v, op2 = %+v", t1, t2)
			}
			r := 0
			if t2.ivalue > t1.ivalue {
				r = 1
			}
			ts.Push(Token{
				typ:    number,
				ivalue: r,
			})
			return nil
		},
		priority:  20,
		leftAssoc: false,
	},
	">=": {
		handler: func(ts *TokenStack) error {
			t1 := ts.Pop()
			t2 := ts.Pop()
			if t1.typ != number || t2.typ != number {
				return fmt.Errorf("Both arguments must be number, got op1 = %+v, op2 = %+v", t1, t2)
			}
			r := 0
			if t2.ivalue >= t1.ivalue {
				r = 1
			}
			ts.Push(Token{
				typ:    number,
				ivalue: r,
			})
			return nil
		},
		priority:  20,
		leftAssoc: false,
	},
	"<": {
		handler: func(ts *TokenStack) error {
			t1 := ts.Pop()
			t2 := ts.Pop()
			if t1.typ != number || t2.typ != number {
				return fmt.Errorf("Both arguments must be number, got op1 = %+v, op2 = %+v", t1, t2)
			}
			r := 0
			if t2.ivalue < t1.ivalue {
				r = 1
			}
			ts.Push(Token{
				typ:    number,
				ivalue: r,
			})
			return nil
		},
		priority:  20,
		leftAssoc: false,
	},
	"<=": {
		handler: func(ts *TokenStack) error {
			t1 := ts.Pop()
			t2 := ts.Pop()
			if t1.typ != number || t2.typ != number {
				return fmt.Errorf("Both arguments must be number, got op1 = %+v, op2 = %+v", t1, t2)
			}
			r := 0
			if t2.ivalue <= t1.ivalue {
				r = 1
			}
			ts.Push(Token{
				typ:    number,
				ivalue: r,
			})
			return nil
		},
		priority:  20,
		leftAssoc: false,
	},
	// Equality compares numbers numerically, otherwise string payloads.
	"==": {
		handler: func(ts *TokenStack) error {
			t1 := ts.Pop()
			t2 := ts.Pop()
			r := 0
			if t1.typ == number && t2.typ == number && t1.ivalue == t2.ivalue {
				r = 1
			} else if t1.value == t2.value {
				r = 1
			}
			ts.Push(Token{
				typ:    number,
				ivalue: r,
			})
			return nil
		},
		priority:  20,
		leftAssoc: false,
	},
	"!=": {
		handler: func(ts *TokenStack) error {
			t1 := ts.Pop()
			t2 := ts.Pop()
			r := 0
			if t1.typ == number && t2.typ == number && t1.ivalue != t2.ivalue {
				r = 1
			} else if t1.value != t2.value {
				r = 1
			}
			ts.Push(Token{
				typ:    number,
				ivalue: r,
			})
			return nil
		},
		priority:  20,
		leftAssoc: false,
	},
	// Logical and/or treat any non-zero number as true.
	"&&": {
		handler: func(ts *TokenStack) error {
			t1 := ts.Pop()
			t2 := ts.Pop()
			if t1.typ != number || t2.typ != number {
				return fmt.Errorf("Both arguments must be number, got op1 = %+v, op2 = %+v", t1, t2)
			}
			r := 0
			if t1.ivalue != 0 && t2.ivalue != 0 {
				r = 1
			}
			ts.Push(Token{
				typ:    number,
				ivalue: r,
			})
			return nil
		},
		priority:  10,
		leftAssoc: false,
	},
	"||": {
		handler: func(ts *TokenStack) error {
			t1 := ts.Pop()
			t2 := ts.Pop()
			if t1.typ != number || t2.typ != number {
				return fmt.Errorf("Both arguments must be number, got op1 = %+v, op2 = %+v", t1, t2)
			}
			r := 0
			if t1.ivalue != 0 || t2.ivalue != 0 {
				r = 1
			}
			ts.Push(Token{
				typ:    number,
				ivalue: r,
			})
			return nil
		},
		priority:  0,
		leftAssoc: false,
	},
}
// Functions is the default function set. Like operators, functions pop
// their arguments from the stack and push one result token back.
var Functions = map[string]func(ts *TokenStack) error{
	// max returns the larger of two numbers.
	"max": func(ts *TokenStack) error {
		t1 := ts.Pop()
		t2 := ts.Pop()
		if t1.typ != number || t2.typ != number {
			return fmt.Errorf("Both arguments must be number, got op1 = %+v, op2 = %+v", t1, t2)
		}
		max := t1.ivalue
		if t2.ivalue > max {
			max = t2.ivalue
		}
		ts.Push(Token{
			typ:    number,
			ivalue: max,
		})
		return nil
	},
	// min returns the smaller of two numbers.
	"min": func(ts *TokenStack) error {
		t1 := ts.Pop()
		t2 := ts.Pop()
		if t1.typ != number || t2.typ != number {
			return fmt.Errorf("Both arguments must be number, got op1 = %+v, op2 = %+v", t1, t2)
		}
		min := t1.ivalue
		if t2.ivalue < min {
			min = t2.ivalue
		}
		ts.Push(Token{
			typ:    number,
			ivalue: min,
		})
		return nil
	},
	// len returns the byte length of the token's string payload.
	// NOTE(review): no type check — number tokens carry an empty value
	// string, so len(number) yields 0; confirm whether that is intended.
	"len": func(ts *TokenStack) error {
		t := ts.Pop()
		ts.Push(Token{
			typ:    number,
			ivalue: len(t.value),
		})
		return nil
	},
	// atoi converts a string/word token to a number token.
	"atoi": func(ts *TokenStack) error {
		t := ts.Pop()
		if t.typ != str && t.typ != word {
			return fmt.Errorf("atoi requires string argument, got %+v", t)
		}
		n, err := strconv.Atoi(t.value)
		if err != nil {
			return err
		}
		ts.Push(Token{
			typ:    number,
			ivalue: n,
		})
		return nil
	},
	// itoa converts a number token to a string token.
	"itoa": func(ts *TokenStack) error {
		t := ts.Pop()
		if t.typ != number {
			return fmt.Errorf("itoa requires number argument, got %+v", t)
		}
		s := strconv.Itoa(t.ivalue)
		ts.Push(Token{
			typ:   str,
			value: s,
		})
		return nil
	},
}

80
token.go Normal file
View file

@ -0,0 +1,80 @@
package lexpr
// Token is a typed value flowing through the tokenizer, converter and
// executor stages of the pipeline.
type Token struct {
	typ       lexType // token kind: number, str, word, op, funct, lp, rp, sep, tokError
	value     string  // textual payload (strings, words, operator symbols)
	ivalue    int     // numeric payload for number tokens
	priority  int     // operator priority (op tokens only)
	leftAssoc bool    // operator associativity (op tokens only)
}

// Number returns the numeric payload and whether the token is a number.
func (t Token) Number() (int, bool) {
	return t.ivalue, t.typ == number
}

// String returns the textual payload and whether the token is a string.
func (t Token) String() (string, bool) {
	return t.value, t.typ == str
}

// Word returns the textual payload and whether the token is a word.
func (t Token) Word() (string, bool) {
	return t.value, t.typ == word
}
// TokenFromAny converts a Go value into a Token: strings become str tokens;
// ints, float64/float32 (truncated toward zero) and bools (false=0, true=1)
// become number tokens. The boolean result reports whether the value's type
// is supported.
func TokenFromAny(variable any) (Token, bool) {
	switch v := variable.(type) {
	case string:
		return Token{typ: str, value: v}, true
	case int:
		return Token{typ: number, ivalue: v}, true
	case float64:
		return Token{typ: number, ivalue: int(v)}, true
	case float32:
		return Token{typ: number, ivalue: int(v)}, true
	case bool:
		var n int
		if v {
			n = 1
		}
		return Token{typ: number, ivalue: n}, true
	default:
		return Token{}, false
	}
}
// TokenFromWord builds a word token from an identifier name.
func TokenFromWord(wordName string) Token {
	return Token{
		typ:   word,
		value: wordName,
	}
}

// TokenFromString builds a string token from s.
func TokenFromString(s string) Token {
	return Token{
		typ:   str,
		value: s,
	}
}

// TokenFromInt builds a number token from n.
func TokenFromInt(n int) Token {
	return Token{
		typ:    number,
		ivalue: n,
	}
}

93
tokenizer.go Normal file
View file

@ -0,0 +1,93 @@
package lexpr
import (
"context"
"fmt"
"strconv"
)
// tokenize converts raw lexems into typed tokens, classifying each word as
// an operator, a function or a plain word using the evaluator's registries.
// The output channel is closed when the input ends, a bad lexem is seen,
// or ctx is cancelled.
func (l *Lexpr) tokenize(ctx context.Context, lexems <-chan lexem) <-chan Token {
	out := make(chan Token)
	go func() {
		defer close(out)
		for {
			select {
			case <-ctx.Done():
				return
			case lexem, ok := <-lexems:
				if !ok {
					return
				}
				switch {
				case lexem.Type == lp:
					out <- Token{
						typ: lp,
					}
				case lexem.Type == rp:
					out <- Token{
						typ: rp,
					}
				case lexem.Type == sep:
					out <- Token{
						typ: sep,
					}
				case lexem.Type == number:
					// The scanner guarantees digits, so the Atoi error is
					// dropped. NOTE(review): float lexems like "1.5" fail
					// Atoi and silently become 0 — confirm intended.
					ivalue, _ := strconv.Atoi(lexem.Value)
					out <- Token{
						typ:    number,
						ivalue: ivalue,
					}
				case lexem.Type == str:
					out <- Token{
						typ:   str,
						value: lexem.Value,
					}
				case lexem.Type == op:
					o, isOp := l.operators[lexem.Value]
					if !isOp {
						out <- Token{
							typ:   tokError,
							value: fmt.Sprintf("unknown operator: %s", lexem.Value),
						}
						return
					}
					out <- Token{
						typ:       op,
						value:     lexem.Value,
						priority:  o.priority,
						leftAssoc: o.leftAssoc,
					}
				case lexem.Type == word:
					// A word can name an operator, a function, or stay a
					// plain word (variable / JSON key).
					// NOTE(review): these lookups are case-sensitive while
					// SetFunction/SetOperator lower-case their keys, so
					// mixed-case calls such as Len(...) will not resolve —
					// confirm whether words should be lower-cased here.
					o, isOp := l.operators[lexem.Value]
					_, isFunc := l.functions[lexem.Value]
					switch {
					case isOp:
						out <- Token{
							typ:       op,
							value:     lexem.Value,
							priority:  o.priority,
							leftAssoc: o.leftAssoc,
						}
					case isFunc:
						out <- Token{
							typ:   funct,
							value: lexem.Value,
						}
					default:
						out <- Token{
							typ:   word,
							value: lexem.Value,
						}
					}
				case lexem.Type == tokError:
					out <- Token{
						typ:   tokError,
						value: lexem.Value,
					}
					return
				}
			}
		}
	}()
	return out
}

157
tokenizer_test.go Normal file
View file

@ -0,0 +1,157 @@
package lexpr
import (
"context"
"reflect"
"testing"
)
// TestLexpr_tokenize checks word classification: "min"/"max" become funct
// tokens, "*"/"==" (fed as word lexems) resolve to operators with their
// priorities, and literals are converted to typed tokens.
func TestLexpr_tokenize(t *testing.T) {
	type args struct {
		lexems []lexem
	}
	tests := []struct {
		name string
		args args
		want []Token
	}{
		{
			name: "math",
			args: args{
				lexems: []lexem{
					{
						Type:  word,
						Value: "min",
					}, {
						Type:  lp,
						Value: "(",
					}, {
						Type:  number,
						Value: "3",
					}, {
						Type:  sep,
						Value: ",",
					}, {
						Type:  number,
						Value: "2",
					}, {
						Type:  rp,
						Value: ")",
					}, {
						Type:  word,
						Value: "*",
					}, {
						Type:  word,
						Value: "max",
					}, {
						Type:  lp,
						Value: "(",
					}, {
						Type:  number,
						Value: "10",
					}, {
						Type:  sep,
						Value: ",",
					}, {
						Type:  number,
						Value: "20",
					}, {
						Type:  rp,
						Value: ")",
					}, {
						Type:  word,
						Value: "==",
					}, {
						Type:  number,
						Value: "40",
					},
				},
			},
			want: []Token{
				{
					typ:   funct,
					value: "min",
				},
				{
					typ: lp,
				},
				{
					typ:    number,
					ivalue: 3,
				},
				{
					typ: sep,
				},
				{
					typ:    number,
					ivalue: 2,
				},
				{
					typ: rp,
				},
				{
					typ:       op,
					value:     "*",
					ivalue:    0,
					priority:  120,
					leftAssoc: false,
				},
				{
					typ:   funct,
					value: "max",
				},
				{
					typ: lp,
				},
				{
					typ:    number,
					ivalue: 10,
				},
				{
					typ: sep,
				},
				{
					typ:    number,
					ivalue: 20,
				},
				{
					typ: rp,
				},
				{
					typ:       op,
					value:     "==",
					ivalue:    0,
					priority:  20,
					leftAssoc: false,
				},
				{
					typ:    number,
					ivalue: 40,
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Use the default registries so words resolve as they would in
			// production.
			l := &Lexpr{
				operators: Operators,
				functions: Functions,
			}
			lexemsCh := make(chan lexem)
			go func() {
				defer close(lexemsCh)
				for _, l := range tt.args.lexems {
					lexemsCh <- l
				}
			}()
			gotCh := l.tokenize(context.Background(), lexemsCh)
			got := []Token{}
			for o := range gotCh {
				got = append(got, o)
			}
			if !reflect.DeepEqual(got, tt.want) {
				t.Errorf("Lexpr.tokenize() = \n%v, want \n%v", got, tt.want)
			}
		})
	}
}