resolver: Implement pipe with capture and replace
For the main scenario we have an expression like [1]. This expression
uses a capture reference path as the pipe input and a replace expression
as the pipe output. This change converts the piped-in path into the
first argument of the ternary replace operation.

[1] capture.base-iface-routes | routes.running.next-hop-interface:="br1"

Signed-off-by: Quique Llorente <[email protected]>
qinqon committed Dec 7, 2021
1 parent 0649f70 commit e40c864
Showing 4 changed files with 139 additions and 7 deletions.
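For orientation, here is a minimal, self-contained sketch of the semantics described in the commit message: with a pipe, the captured path fills the first slot of the ternary replace; without one, the implicit current-state identity does. The types and names below are illustrative assumptions, not the real nmpolicy AST.

package main

import "fmt"

// replaceExpr is a toy stand-in for the ternary replace node: a source state,
// a destination path, and the value to set.
type replaceExpr struct {
	Source string // piped-in capture path, or "currentState"
	Path   string // destination path, e.g. routes.running.next-hop-interface
	Value  string // replacement value, e.g. "br1"
}

// buildReplace mirrors the rule introduced by this change: a piped-in path,
// when present, becomes the first argument of the replace operation.
func buildReplace(pipedIn, path, value string) replaceExpr {
	source := "currentState"
	if pipedIn != "" {
		source = pipedIn
	}
	return replaceExpr{Source: source, Path: path, Value: value}
}

func main() {
	// capture.base-iface-routes | routes.running.next-hop-interface:="br1"
	withPipe := buildReplace("capture.base-iface-routes", "routes.running.next-hop-interface", "br1")
	// routes.running.next-hop-interface:="br1"
	withoutPipe := buildReplace("", "routes.running.next-hop-interface", "br1")
	fmt.Printf("%+v\n%+v\n", withPipe, withoutPipe)
}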
10 changes: 8 additions & 2 deletions nmpolicy/internal/lexer/token.go
@@ -24,12 +24,14 @@ const (
NUMBER
STRING

DOT // .
PIPE // |
DOT // .

operatorsBegin
PIPE // |
REPLACE // :=
EQFILTER // ==
MERGE // +
operatorsEnd

TRUE // true
FALSE // false
@@ -56,6 +58,10 @@ func (t TokenType) String() string {
return tokens[t]
}

func (t TokenType) IsOperator() bool {
return t > operatorsBegin && t < operatorsEnd
}

type Token struct {
Position int
Type TokenType
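The IsOperator helper added above relies on the common Go sentinel-constant pattern: the operator token types are declared between two unexported sentinels, so membership becomes a simple range check. A trimmed-down, runnable illustration (the TokenType values here are a reduced assumption, not the full nmpolicy set):

package main

import "fmt"

type TokenType int

const (
	EOF TokenType = iota
	IDENTITY
	STRING

	DOT // .

	operatorsBegin
	PIPE     // |
	REPLACE  // :=
	EQFILTER // ==
	MERGE    // +
	operatorsEnd
)

// IsOperator holds because every operator constant is declared between the
// two unexported sentinels, so a range check suffices.
func (t TokenType) IsOperator() bool {
	return t > operatorsBegin && t < operatorsEnd
}

func main() {
	fmt.Println(PIPE.IsOperator(), REPLACE.IsOperator(), DOT.IsOperator()) // true true false
}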
7 changes: 7 additions & 0 deletions nmpolicy/internal/parser/errors.go
@@ -78,3 +78,10 @@ func invalidExpressionError(msg string) *parserError {
msg: msg,
}
}

func invalidPipeError(msg string) *parserError {
return &parserError{
prefix: "invalid pipe",
msg: msg,
}
}
46 changes: 41 additions & 5 deletions nmpolicy/internal/parser/parser.go
@@ -31,6 +31,7 @@ type parser struct {
tokens []lexer.Token
currentTokenIdx int
lastNode *ast.Node
pipedInNode *ast.Node
}

func New() Parser {
@@ -75,15 +76,26 @@ func (p *parser) parse() (ast.Node, error) {
if err := p.parseReplace(); err != nil {
return ast.Node{}, err
}
} else if p.currentToken().Type == lexer.PIPE {
if err := p.parsePipe(); err != nil {
return ast.Node{}, err
}
} else {
return ast.Node{}, invalidExpressionError(fmt.Sprintf("unexpected token `%+v`", p.currentToken().Literal))
}
p.nextToken()
}
if p.pipedInNode != nil {
return ast.Node{}, invalidPipeError("missing pipe out expression")
}
return p.lastEmitedNode(), nil
}

func (p *parser) lastEmitedNode() ast.Node {
if p.lastNode == nil {
return ast.Node{}, nil
return ast.Node{}
}
return *p.lastNode, nil
return *p.lastNode
}

func (p *parser) nextToken() {
@@ -169,7 +181,7 @@ func (p *parser) parsePath() error {
}
path := append(*operator.Path, *p.lastNode)
operator.Path = &path
} else if p.currentToken().Type != lexer.EOF && p.currentToken().Type != lexer.EQFILTER && p.currentToken().Type != lexer.REPLACE {
} else if p.currentToken().Type != lexer.EOF && !p.currentToken().Type.IsOperator() {
return invalidPathError("missing dot")
} else {
// Token has not been consumed, let's go back.
@@ -192,7 +204,9 @@ func (p *parser) parseEqFilter() error {
if p.lastNode.Path == nil {
return invalidEqualityFilterError("left hand argument is not a path")
}
operator.EqFilter[0].Terminal = ast.CurrentStateIdentity()

p.fillInPipedInOrCurrentState(&operator.EqFilter[0])

operator.EqFilter[1] = *p.lastNode

p.nextToken()
@@ -228,7 +242,9 @@ func (p *parser) parseReplace() error {
if p.lastNode.Path == nil {
return invalidReplaceError("left hand argument is not a path")
}
operator.Replace[0].Terminal = ast.CurrentStateIdentity()

p.fillInPipedInOrCurrentState(&operator.Replace[0])

operator.Replace[1] = *p.lastNode

p.nextToken()
@@ -245,3 +261,23 @@ func (p *parser) parseReplace() error {
p.lastNode = operator
return nil
}

func (p *parser) fillInPipedInOrCurrentState(node *ast.Node) {
if p.pipedInNode != nil {
*node = *p.pipedInNode
p.pipedInNode = nil
} else {
node.Terminal = ast.CurrentStateIdentity()
}
}

func (p *parser) parsePipe() error {
if p.lastNode == nil {
return invalidPipeError("missing pipe in expression")
}
if p.lastNode.Path == nil {
return invalidPipeError("only paths can be piped in")
}
p.pipedInNode = p.lastNode
return nil
}
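For readers skimming the hunk above, a compressed sketch of the pipedInNode bookkeeping: parsePipe stashes the last parsed path, fillInPipedInOrCurrentState later consumes it as the operator's first argument (falling back to the current-state identity), and a leftover value at the end of parsing surfaces the "missing pipe out expression" error. The miniature types below are assumptions for illustration, not the real parser.

package main

import (
	"errors"
	"fmt"
)

// pathNode is a toy stand-in for an *ast.Node holding a path.
type pathNode struct{ path string }

type miniParser struct {
	lastNode    *pathNode
	pipedInNode *pathNode
}

// parsePipe: only a previously parsed path may be piped in.
func (p *miniParser) parsePipe() error {
	if p.lastNode == nil {
		return errors.New("invalid pipe: missing pipe in expression")
	}
	p.pipedInNode = p.lastNode
	return nil
}

// fillInPipedInOrCurrentState: the piped-in path, when present, wins over the
// implicit current-state source and is consumed exactly once.
func (p *miniParser) fillInPipedInOrCurrentState() string {
	if p.pipedInNode != nil {
		src := p.pipedInNode.path
		p.pipedInNode = nil
		return src
	}
	return "currentState"
}

// finish: a pipe that was never followed by an operator is an error.
func (p *miniParser) finish() error {
	if p.pipedInNode != nil {
		return errors.New("invalid pipe: missing pipe out expression")
	}
	return nil
}

func main() {
	p := &miniParser{lastNode: &pathNode{path: "capture.default-gw"}}
	_ = p.parsePipe()
	fmt.Println(p.fillInPipedInOrCurrentState()) // capture.default-gw
	fmt.Println(p.finish())                      // <nil>
}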
83 changes: 83 additions & 0 deletions nmpolicy/internal/parser/parser_test.go
@@ -32,6 +32,7 @@ func TestParser(t *testing.T) {
testParsePath(t)
testParseEqFilter(t)
testParseReplace(t)
testParseCapturePipeReplace(t)

testParseBasicFailures(t)
testParsePathFailures(t)
@@ -123,6 +124,43 @@ func testParseEqFilterFailure(t *testing.T) {
eof(),
),
),
expectError(`invalid pipe: missing pipe in expression`,
fromTokens(
pipe(),
identity("routes"),
dot(),
identity("running"),
dot(),
identity("next-hop-interface"),
replace(),
str("br1"),
eof(),
),
),
expectError(`invalid pipe: missing pipe out expression`,
fromTokens(
identity("capture"),
dot(),
identity("default-gw"),
pipe(),
eof(),
),
),

expectError(`invalid pipe: only paths can be piped in`,
fromTokens(
str("foo"),
pipe(),
identity("routes"),
dot(),
identity("running"),
dot(),
identity("next-hop-interface"),
replace(),
str("br1"),
eof(),
),
),
}
runTest(t, tests)
}
@@ -308,6 +346,47 @@ replace:
runTest(t, tests)
}

func testParseCapturePipeReplace(t *testing.T) {
var tests = []test{
expectAST(t, `
pos: 52
replace:
- pos: 0
path:
- pos: 0
identity: capture
- pos: 8
identity: default-gw
- pos: 19
path:
- pos: 19
identity: routes
- pos: 26
identity: running
- pos: 34
identity: next-hop-interface
- pos: 54
string: br1
`,
fromTokens(
identity("capture"),
dot(),
identity("default-gw"),
pipe(),
identity("routes"),
dot(),
identity("running"),
dot(),
identity("next-hop-interface"),
replace(),
str("br1"),
eof(),
),
),
}
runTest(t, tests)
}

func testParserReuse(t *testing.T) {
p := parser.New()
testToRun1 := expectAST(t, `
@@ -452,3 +531,7 @@ func eqfilter() lexer.Token {
func replace() lexer.Token {
return lexer.Token{Type: lexer.REPLACE, Literal: ":="}
}

func pipe() lexer.Token {
return lexer.Token{Type: lexer.PIPE, Literal: "|"}
}
