From 667cc5faaea829220ee0218b4d4fcf9044262000 Mon Sep 17 00:00:00 2001
From: Tanner Stirrat
Date: Mon, 28 Oct 2024 15:28:45 -0600
Subject: [PATCH 1/5] Add any and all to keywords

---
 pkg/composableschemadsl/lexer/lex_def.go | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/pkg/composableschemadsl/lexer/lex_def.go b/pkg/composableschemadsl/lexer/lex_def.go
index de178c3662..5e56ea49ec 100644
--- a/pkg/composableschemadsl/lexer/lex_def.go
+++ b/pkg/composableschemadsl/lexer/lex_def.go
@@ -80,6 +80,8 @@ var keywords = map[string]struct{}{
 	"with":   {},
 	"from":   {},
 	"import": {},
+	"all":    {},
+	"any":    {},
 }
 
 // IsKeyword returns whether the specified input string is a reserved keyword.

From a0130e2ce7ac58e92152665530ca588fc7e895ac Mon Sep 17 00:00:00 2001
From: Tanner Stirrat
Date: Mon, 28 Oct 2024 15:29:12 -0600
Subject: [PATCH 2/5] Update test to reflect new behavior

---
 .../tests/arrowillegalfunc.zed.expected | 38 +++++++++----------
 1 file changed, 19 insertions(+), 19 deletions(-)

diff --git a/pkg/composableschemadsl/parser/tests/arrowillegalfunc.zed.expected b/pkg/composableschemadsl/parser/tests/arrowillegalfunc.zed.expected
index 803106a8e2..7951a7f1bf 100644
--- a/pkg/composableschemadsl/parser/tests/arrowillegalfunc.zed.expected
+++ b/pkg/composableschemadsl/parser/tests/arrowillegalfunc.zed.expected
@@ -1,32 +1,32 @@
 NodeTypeFile
-  end-rune = 48
+  end-rune = 45
   input-source = arrow illegal function test
   start-rune = 0
   child-node =>
     NodeTypeDefinition
       definition-name = resource
-      end-rune = 48
+      end-rune = 45
       input-source = arrow illegal function test
       start-rune = 0
       child-node =>
         NodeTypePermission
-          end-rune = 48
+          end-rune = 45
           input-source = arrow illegal function test
           relation-name = view
           start-rune = 26
           child-node =>
             NodeTypeError
-              end-rune = 48
-              error-message = Expected 'any' or 'all' for arrow function, found: foo
-              error-source = (
+              end-rune = 45
+              error-message = Expected one of keywords any, all; found token TokenTypeIdentifier
+              error-source = foo
               input-source = arrow illegal function test
-              start-rune = 49
+              start-rune = 46
             NodeTypeError
-              end-rune = 48
-              error-message = Expected right hand expression, found: TokenTypeLeftParen
-              error-source = (
+              end-rune = 45
+              error-message = Expected right hand expression, found: TokenTypeIdentifier
+              error-source = foo
               input-source = arrow illegal function test
-              start-rune = 49
+              start-rune = 46
           compute-expression =>
             NodeTypeIdentifier
               end-rune = 44
               input-source = arrow illegal function test
               start-rune = 44
         NodeTypeError
-          end-rune = 48
-          error-message = Expected end of statement or definition, found: TokenTypeLeftParen
-          error-source = (
+          end-rune = 45
+          error-message = Expected end of statement or definition, found: TokenTypeIdentifier
+          error-source = foo
           input-source = arrow illegal function test
-          start-rune = 49
+          start-rune = 46
     NodeTypeError
-      end-rune = 48
-      error-message = Unexpected token at root level: TokenTypeLeftParen
-      error-source = (
+      end-rune = 45
+      error-message = Unexpected token at root level: TokenTypeIdentifier
+      error-source = foo
       input-source = arrow illegal function test
-      start-rune = 49
\ No newline at end of file
+      start-rune = 46
\ No newline at end of file

From d9fb4dea2c7d8d9515623dbf85aeeec83994d596 Mon Sep 17 00:00:00 2001
From: Tanner Stirrat
Date: Mon, 28 Oct 2024 15:29:25 -0600
Subject: [PATCH 3/5] Add consumeKeywords function

---
 pkg/composableschemadsl/parser/parser_impl.go | 22 ++++++++++++++++++++++
 1 file changed, 22 insertions(+)

diff --git a/pkg/composableschemadsl/parser/parser_impl.go b/pkg/composableschemadsl/parser/parser_impl.go
index 9f22721d43..03b005faac 100644
--- a/pkg/composableschemadsl/parser/parser_impl.go
+++ b/pkg/composableschemadsl/parser/parser_impl.go
@@ -2,6 +2,7 @@ package parser
 
 import (
 	"fmt"
+	"strings"
 
 	"github.com/authzed/spicedb/pkg/composableschemadsl/dslshape"
 	"github.com/authzed/spicedb/pkg/composableschemadsl/input"
@@ -197,6 +198,27 @@ func (p *sourceParser) tryConsumeKeyword(keyword string) bool {
 	return true
 }
 
+// consumeKeywords consumes one of a set of expected keywords or adds an error node.
+func (p *sourceParser) consumeKeywords(keywords ...string) (string, bool) {
+	keyword, ok := p.tryConsumeKeywords(keywords...)
+	if !ok {
+		p.emitErrorf("Expected one of keywords %s; found token %v", strings.Join(keywords, ", "), p.currentToken.Kind)
+		return "", false
+	}
+	return keyword, true
+}
+
+// tryConsumeKeywords attempts to consume one of a set of expected keyword tokens.
+func (p *sourceParser) tryConsumeKeywords(keywords ...string) (string, bool) {
+	for _, keyword := range keywords {
+		if p.isKeyword(keyword) {
+			p.consumeToken()
+			return keyword, true
+		}
+	}
+	return "", false
+}
+
 // cosumeIdentifier consumes an expected identifier token or adds an error node.
 func (p *sourceParser) consumeIdentifier() (string, bool) {
 	token, ok := p.tryConsume(lexer.TokenTypeIdentifier)

From 2c33d96797d22230d0b46b33b6b60540c2081dac Mon Sep 17 00:00:00 2001
From: Tanner Stirrat
Date: Mon, 28 Oct 2024 15:29:43 -0600
Subject: [PATCH 4/5] Update parser behavior to reflect new keywords

---
 pkg/composableschemadsl/parser/parser.go | 25 +++++++++++++++++--------
 1 file changed, 17 insertions(+), 8 deletions(-)

diff --git a/pkg/composableschemadsl/parser/parser.go b/pkg/composableschemadsl/parser/parser.go
index 1cc1b2a863..3fa95d402b 100644
--- a/pkg/composableschemadsl/parser/parser.go
+++ b/pkg/composableschemadsl/parser/parser.go
@@ -208,7 +208,7 @@ func (p *sourceParser) consumeCaveatTypeReference() AstNode {
 	typeRefNode := p.startNode(dslshape.NodeTypeCaveatTypeReference)
 	defer p.mustFinishNode()
 
-	name, ok := p.consumeIdentifier()
+	name, ok := p.consumeCaveatTypeIdentifier()
 	if !ok {
 		return typeRefNode
 	}
@@ -234,6 +234,21 @@
 	return typeRefNode
 }
 
+// "any" is both a keyword and a valid caveat type, so a caveat type identifier
+// can be either a keyword or an identifier. This helper accepts both.
+func (p *sourceParser) consumeCaveatTypeIdentifier() (string, bool) {
+	if ok := p.tryConsumeKeyword("any"); ok {
+		return "any", true
+	}
+
+	identifier, ok := p.tryConsume(lexer.TokenTypeIdentifier)
+	if !ok {
+		p.emitErrorf("Expected keyword \"any\" or a valid identifier, found token %v", p.currentToken.Kind)
+		return "", false
+	}
+	return identifier.Value, true
+}
+
 // consumeDefinition attempts to consume a single schema definition.
 // ```definition somedef { ... }```
 func (p *sourceParser) consumeDefinition() AstNode {
@@ -483,17 +498,11 @@ func (p *sourceParser) tryConsumeArrowExpression() (AstNode, bool) {
 	rightNodeBuilder := func(leftNode AstNode, operatorToken lexer.Lexeme) (AstNode, bool) {
 		// Check for an arrow function.
 		if operatorToken.Kind == lexer.TokenTypePeriod {
-			functionName, ok := p.consumeIdentifier()
+			functionName, ok := p.consumeKeywords("any", "all")
 			if !ok {
 				return nil, false
 			}
 
-			// TODO(jschorr): Change to keywords in schema v2.
-			if functionName != "any" && functionName != "all" {
-				p.emitErrorf("Expected 'any' or 'all' for arrow function, found: %s", functionName)
-				return nil, false
-			}
-
 			if _, ok := p.consume(lexer.TokenTypeLeftParen); !ok {
 				return nil, false
 			}

From a7c83328010666d580de999108255a89bc6a60ad Mon Sep 17 00:00:00 2001
From: Tanner Stirrat
Date: Mon, 28 Oct 2024 15:43:52 -0600
Subject: [PATCH 5/5] Make lexer tests exhaustive, update

---
 pkg/composableschemadsl/lexer/lex_test.go | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/pkg/composableschemadsl/lexer/lex_test.go b/pkg/composableschemadsl/lexer/lex_test.go
index 9bf792723e..6d40fa1eb9 100644
--- a/pkg/composableschemadsl/lexer/lex_test.go
+++ b/pkg/composableschemadsl/lexer/lex_test.go
@@ -57,6 +57,14 @@
 	{"minus", "-", []Lexeme{{TokenTypeMinus, 0, "-", ""}, tEOF}},
 
 	{"keyword", "definition", []Lexeme{{TokenTypeKeyword, 0, "definition", ""}, tEOF}},
+	{"keyword", "caveat", []Lexeme{{TokenTypeKeyword, 0, "caveat", ""}, tEOF}},
+	{"keyword", "relation", []Lexeme{{TokenTypeKeyword, 0, "relation", ""}, tEOF}},
+	{"keyword", "permission", []Lexeme{{TokenTypeKeyword, 0, "permission", ""}, tEOF}},
+	{"keyword", "nil", []Lexeme{{TokenTypeKeyword, 0, "nil", ""}, tEOF}},
+	{"keyword", "with", []Lexeme{{TokenTypeKeyword, 0, "with", ""}, tEOF}},
+	{"keyword", "from", []Lexeme{{TokenTypeKeyword, 0, "from", ""}, tEOF}},
+	{"keyword", "import", []Lexeme{{TokenTypeKeyword, 0, "import", ""}, tEOF}},
+	{"keyword", "all", []Lexeme{{TokenTypeKeyword, 0, "all", ""}, tEOF}},
 	{"keyword", "nil", []Lexeme{{TokenTypeKeyword, 0, "nil", ""}, tEOF}},
 	{"identifier", "define", []Lexeme{{TokenTypeIdentifier, 0, "define", ""}, tEOF}},
 	{"typepath", "foo/bar", []Lexeme{
@@ -251,7 +259,7 @@
 	{"dot access", "foo.all(something)", []Lexeme{
 		{TokenTypeIdentifier, 0, "foo", ""},
 		{TokenTypePeriod, 0, ".", ""},
-		{TokenTypeIdentifier, 0, "all", ""},
+		{TokenTypeKeyword, 0, "all", ""},
 		{TokenTypeLeftParen, 0, "(", ""},
 		{TokenTypeIdentifier, 0, "something", ""},
 		{TokenTypeRightParen, 0, ")", ""},
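
Note for reviewers: taken together, these patches reserve "any" and "all" so the parser
treats functioned arrows (left.any(relation) / left.all(relation)) and the "any" caveat
type as keywords instead of validating identifier text after the fact. The sketch below
shows roughly the schema shape this supports; it is inferred only from the
foo.all(something) lexer test and the arrow-expression changes above, and the definition
and relation names are illustrative rather than taken from the test data.

    definition user {}

    definition group {
        relation member: user
    }

    definition resource {
        relation parent: group

        // Sketch: view requires the subject to hold member on every parent group;
        // parent.any(member) would instead grant it via membership in any one of them.
        permission view = parent.all(member)
    }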