Merge pull request #111008 from cici37/bumpCEL

Bump cel-go to v0.12.3
Authored by Kubernetes Prow Robot on 2022-07-14 03:03:04 -07:00; committed by GitHub
134 changed files with 4157 additions and 1799 deletions

View File

@@ -4,6 +4,8 @@
package antlr
import "sync"
var ATNInvalidAltNumber int
type ATN struct {
@@ -37,6 +39,10 @@ type ATN struct {
ruleToTokenType []int
states []ATNState
mu sync.Mutex
stateMu sync.RWMutex
edgeMu sync.RWMutex
}
func NewATN(grammarType int, maxTokenType int) *ATN {
@@ -59,14 +65,15 @@ func (a *ATN) NextTokensInContext(s ATNState, ctx RuleContext) *IntervalSet {
// in s and staying in same rule. Token.EPSILON is in set if we reach end of
// rule.
func (a *ATN) NextTokensNoContext(s ATNState) *IntervalSet {
if s.GetNextTokenWithinRule() != nil {
return s.GetNextTokenWithinRule()
a.mu.Lock()
defer a.mu.Unlock()
iset := s.GetNextTokenWithinRule()
if iset == nil {
iset = a.NextTokensInContext(s, nil)
iset.readOnly = true
s.SetNextTokenWithinRule(iset)
}
s.SetNextTokenWithinRule(a.NextTokensInContext(s, nil))
s.GetNextTokenWithinRule().readOnly = true
return s.GetNextTokenWithinRule()
return iset
}
func (a *ATN) NextTokens(s ATNState, ctx RuleContext) *IntervalSet {
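The hunk above replaces the single ATN mutex with stateMu/edgeMu RW locks and makes NextTokensNoContext compute the follow set at most once under the lock. A minimal standalone sketch of that compute-once-and-cache pattern, using hypothetical names rather than the real ANTLR types:

package main

import (
	"fmt"
	"sync"
)

// followCache mirrors the locking added to NextTokensNoContext: callers
// take the mutex, compute the value only if it is missing, and cache it.
type followCache struct {
	mu     sync.Mutex
	cached []int
}

func (c *followCache) get(compute func() []int) []int {
	c.mu.Lock()
	defer c.mu.Unlock()
	if c.cached == nil {
		c.cached = compute() // runs at most once, even with concurrent callers
	}
	return c.cached
}

func main() {
	c := &followCache{}
	expensive := func() []int { return []int{1, 2, 3} }
	fmt.Println(c.get(expensive)) // [1 2 3], computed
	fmt.Println(c.get(expensive)) // [1 2 3], served from the cache
}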

View File

@@ -104,7 +104,7 @@ func (b *BaseATNConfigSet) Alts() *BitSet {
func NewBaseATNConfigSet(fullCtx bool) *BaseATNConfigSet {
return &BaseATNConfigSet{
cachedHash: -1,
configLookup: NewArray2DHashSetWithCap(hashATNConfig, equalATNConfigs, 16, 2),
configLookup: newArray2DHashSetWithCap(hashATNConfig, equalATNConfigs, 16, 2),
fullCtx: fullCtx,
}
}
@@ -155,7 +155,7 @@ func (b *BaseATNConfigSet) Add(config ATNConfig, mergeCache *DoubleDict) bool {
}
func (b *BaseATNConfigSet) GetStates() Set {
states := NewArray2DHashSet(nil, nil)
states := newArray2DHashSet(nil, nil)
for i := 0; i < len(b.configs); i++ {
states.Add(b.configs[i].GetState())
@@ -283,7 +283,7 @@ func (b *BaseATNConfigSet) Clear() {
b.configs = make([]ATNConfig, 0)
b.cachedHash = -1
b.configLookup = NewArray2DHashSet(nil, equalATNConfigs)
b.configLookup = newArray2DHashSet(nil, equalATNConfigs)
}
func (b *BaseATNConfigSet) FullContext() bool {
@@ -365,7 +365,7 @@ type OrderedATNConfigSet struct {
func NewOrderedATNConfigSet() *OrderedATNConfigSet {
b := NewBaseATNConfigSet(false)
b.configLookup = NewArray2DHashSet(nil, nil)
b.configLookup = newArray2DHashSet(nil, nil)
return &OrderedATNConfigSet{BaseATNConfigSet: b}
}

View File

@@ -4,7 +4,9 @@
package antlr
var ATNDeserializationOptionsdefaultOptions = &ATNDeserializationOptions{true, false, false}
import "errors"
var defaultATNDeserializationOptions = ATNDeserializationOptions{true, true, false}
type ATNDeserializationOptions struct {
readOnly bool
@@ -12,14 +14,48 @@ type ATNDeserializationOptions struct {
generateRuleBypassTransitions bool
}
func NewATNDeserializationOptions(CopyFrom *ATNDeserializationOptions) *ATNDeserializationOptions {
o := new(ATNDeserializationOptions)
func (opts *ATNDeserializationOptions) ReadOnly() bool {
return opts.readOnly
}
if CopyFrom != nil {
o.readOnly = CopyFrom.readOnly
o.verifyATN = CopyFrom.verifyATN
o.generateRuleBypassTransitions = CopyFrom.generateRuleBypassTransitions
func (opts *ATNDeserializationOptions) SetReadOnly(readOnly bool) {
if opts.readOnly {
panic(errors.New("Cannot mutate read only ATNDeserializationOptions"))
}
opts.readOnly = readOnly
}
func (opts *ATNDeserializationOptions) VerifyATN() bool {
return opts.verifyATN
}
func (opts *ATNDeserializationOptions) SetVerifyATN(verifyATN bool) {
if opts.readOnly {
panic(errors.New("Cannot mutate read only ATNDeserializationOptions"))
}
opts.verifyATN = verifyATN
}
func (opts *ATNDeserializationOptions) GenerateRuleBypassTransitions() bool {
return opts.generateRuleBypassTransitions
}
func (opts *ATNDeserializationOptions) SetGenerateRuleBypassTransitions(generateRuleBypassTransitions bool) {
if opts.readOnly {
panic(errors.New("Cannot mutate read only ATNDeserializationOptions"))
}
opts.generateRuleBypassTransitions = generateRuleBypassTransitions
}
func DefaultATNDeserializationOptions() *ATNDeserializationOptions {
return NewATNDeserializationOptions(&defaultATNDeserializationOptions)
}
func NewATNDeserializationOptions(other *ATNDeserializationOptions) *ATNDeserializationOptions {
o := new(ATNDeserializationOptions)
if other != nil {
*o = *other
o.readOnly = false
}
return o
}
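With the setters above panicking once readOnly is set, customization goes through a copy of the defaults. A short usage sketch built only from the functions shown in this hunk; the import path is assumed to be the vendored ANTLR v4 Go runtime:

package main

import "github.com/antlr/antlr4/runtime/Go/antlr"

func main() {
	// DefaultATNDeserializationOptions returns a mutable copy of the package
	// defaults (NewATNDeserializationOptions clears readOnly on the copy).
	opts := antlr.DefaultATNDeserializationOptions()
	opts.SetVerifyATN(false) // allowed: the copy is not read-only
	_ = antlr.NewATNDeserializer(opts)
}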

View File

@@ -5,50 +5,34 @@
package antlr
import (
"encoding/hex"
"fmt"
"strconv"
"strings"
"unicode/utf16"
)
// This is the earliest supported serialized UUID.
// stick to serialized version for now, we don't need a UUID instance
var BaseSerializedUUID = "AADB8D7E-AEEF-4415-AD2B-8204D6CF042E"
var AddedUnicodeSMP = "59627784-3BE5-417A-B9EB-8131A7286089"
const serializedVersion = 4
// This list contains all of the currently supported UUIDs, ordered by when
// the feature first appeared in this branch.
var SupportedUUIDs = []string{BaseSerializedUUID, AddedUnicodeSMP}
var SerializedVersion = 3
// This is the current serialized UUID.
var SerializedUUID = AddedUnicodeSMP
type LoopEndStateIntPair struct {
type loopEndStateIntPair struct {
item0 *LoopEndState
item1 int
}
type BlockStartStateIntPair struct {
type blockStartStateIntPair struct {
item0 BlockStartState
item1 int
}
type ATNDeserializer struct {
deserializationOptions *ATNDeserializationOptions
data []rune
pos int
uuid string
options *ATNDeserializationOptions
data []int32
pos int
}
func NewATNDeserializer(options *ATNDeserializationOptions) *ATNDeserializer {
if options == nil {
options = ATNDeserializationOptionsdefaultOptions
options = &defaultATNDeserializationOptions
}
return &ATNDeserializer{deserializationOptions: options}
return &ATNDeserializer{options: options}
}
func stringInSlice(a string, list []string) int {
@@ -61,30 +45,10 @@ func stringInSlice(a string, list []string) int {
return -1
}
// isFeatureSupported determines if a particular serialized representation of an
// ATN supports a particular feature, identified by the UUID used for
// serializing the ATN at the time the feature was first introduced. Feature is
// the UUID marking the first time the feature was supported in the serialized
// ATN. ActualUuid is the UUID of the actual serialized ATN which is currently
// being deserialized. It returns true if actualUuid represents a serialized ATN
// at or after the feature identified by feature was introduced, and otherwise
// false.
func (a *ATNDeserializer) isFeatureSupported(feature, actualUUID string) bool {
idx1 := stringInSlice(feature, SupportedUUIDs)
if idx1 < 0 {
return false
}
idx2 := stringInSlice(actualUUID, SupportedUUIDs)
return idx2 >= idx1
}
func (a *ATNDeserializer) DeserializeFromUInt16(data []uint16) *ATN {
a.reset(utf16.Decode(data))
func (a *ATNDeserializer) Deserialize(data []int32) *ATN {
a.data = data
a.pos = 0
a.checkVersion()
a.checkUUID()
atn := a.readATN()
@@ -92,15 +56,7 @@ func (a *ATNDeserializer) DeserializeFromUInt16(data []uint16) *ATN {
a.readRules(atn)
a.readModes(atn)
sets := make([]*IntervalSet, 0)
// First, deserialize sets with 16-bit arguments <= U+FFFF.
sets = a.readSets(atn, sets, a.readInt)
// Next, if the ATN was serialized with the Unicode SMP feature,
// deserialize sets with 32-bit arguments <= U+10FFFF.
if (a.isFeatureSupported(AddedUnicodeSMP, a.uuid)) {
sets = a.readSets(atn, sets, a.readInt32)
}
sets := a.readSets(atn, nil)
a.readEdges(atn, sets)
a.readDecisions(atn)
@@ -108,7 +64,7 @@ func (a *ATNDeserializer) DeserializeFromUInt16(data []uint16) *ATN {
a.markPrecedenceDecisions(atn)
a.verifyATN(atn)
if a.deserializationOptions.generateRuleBypassTransitions && atn.grammarType == ATNTypeParser {
if a.options.GenerateRuleBypassTransitions() && atn.grammarType == ATNTypeParser {
a.generateRuleBypassTransitions(atn)
// Re-verify after modification
a.verifyATN(atn)
@@ -118,42 +74,14 @@ func (a *ATNDeserializer) DeserializeFromUInt16(data []uint16) *ATN {
}
func (a *ATNDeserializer) reset(data []rune) {
temp := make([]rune, len(data))
for i, c := range data {
// Don't adjust the first value since that's the version number
if i == 0 {
temp[i] = c
} else if c > 1 {
temp[i] = c - 2
} else {
temp[i] = c + 65533
}
}
a.data = temp
a.pos = 0
}
func (a *ATNDeserializer) checkVersion() {
version := a.readInt()
if version != SerializedVersion {
panic("Could not deserialize ATN with version " + strconv.Itoa(version) + " (expected " + strconv.Itoa(SerializedVersion) + ").")
if version != serializedVersion {
panic("Could not deserialize ATN with version " + strconv.Itoa(version) + " (expected " + strconv.Itoa(serializedVersion) + ").")
}
}
func (a *ATNDeserializer) checkUUID() {
uuid := a.readUUID()
if stringInSlice(uuid, SupportedUUIDs) < 0 {
panic("Could not deserialize ATN with UUID: " + uuid + " (expected " + SerializedUUID + " or a legacy UUID).")
}
a.uuid = uuid
}
func (a *ATNDeserializer) readATN() *ATN {
grammarType := a.readInt()
maxTokenType := a.readInt()
@@ -162,37 +90,36 @@ func (a *ATNDeserializer) readATN() *ATN {
}
func (a *ATNDeserializer) readStates(atn *ATN) {
loopBackStateNumbers := make([]LoopEndStateIntPair, 0)
endStateNumbers := make([]BlockStartStateIntPair, 0)
nstates := a.readInt()
// Allocate worst case size.
loopBackStateNumbers := make([]loopEndStateIntPair, 0, nstates)
endStateNumbers := make([]blockStartStateIntPair, 0, nstates)
// Preallocate states slice.
atn.states = make([]ATNState, 0, nstates)
for i := 0; i < nstates; i++ {
stype := a.readInt()
// Ignore bad types of states
if stype == ATNStateInvalidType {
atn.addState(nil)
continue
}
ruleIndex := a.readInt()
if ruleIndex == 0xFFFF {
ruleIndex = -1
}
s := a.stateFactory(stype, ruleIndex)
if stype == ATNStateLoopEnd {
loopBackStateNumber := a.readInt()
loopBackStateNumbers = append(loopBackStateNumbers, LoopEndStateIntPair{s.(*LoopEndState), loopBackStateNumber})
loopBackStateNumbers = append(loopBackStateNumbers, loopEndStateIntPair{s.(*LoopEndState), loopBackStateNumber})
} else if s2, ok := s.(BlockStartState); ok {
endStateNumber := a.readInt()
endStateNumbers = append(endStateNumbers, BlockStartStateIntPair{s2, endStateNumber})
endStateNumbers = append(endStateNumbers, blockStartStateIntPair{s2, endStateNumber})
}
atn.addState(s)
@@ -200,20 +127,15 @@ func (a *ATNDeserializer) readStates(atn *ATN) {
// Delay the assignment of loop back and end states until we know all the state
// instances have been initialized
for j := 0; j < len(loopBackStateNumbers); j++ {
pair := loopBackStateNumbers[j]
for _, pair := range loopBackStateNumbers {
pair.item0.loopBackState = atn.states[pair.item1]
}
for j := 0; j < len(endStateNumbers); j++ {
pair := endStateNumbers[j]
for _, pair := range endStateNumbers {
pair.item0.setEndState(atn.states[pair.item1].(*BlockEndState))
}
numNonGreedyStates := a.readInt()
for j := 0; j < numNonGreedyStates; j++ {
stateNumber := a.readInt()
@@ -221,7 +143,6 @@ func (a *ATNDeserializer) readStates(atn *ATN) {
}
numPrecedenceStates := a.readInt()
for j := 0; j < numPrecedenceStates; j++ {
stateNumber := a.readInt()
@@ -233,12 +154,12 @@ func (a *ATNDeserializer) readRules(atn *ATN) {
nrules := a.readInt()
if atn.grammarType == ATNTypeLexer {
atn.ruleToTokenType = make([]int, nrules) // TODO: initIntArray(nrules, 0)
atn.ruleToTokenType = make([]int, nrules)
}
atn.ruleToStartState = make([]*RuleStartState, nrules) // TODO: initIntArray(nrules, 0)
atn.ruleToStartState = make([]*RuleStartState, nrules)
for i := 0; i < nrules; i++ {
for i := range atn.ruleToStartState {
s := a.readInt()
startState := atn.states[s].(*RuleStartState)
@@ -247,19 +168,13 @@ func (a *ATNDeserializer) readRules(atn *ATN) {
if atn.grammarType == ATNTypeLexer {
tokenType := a.readInt()
if tokenType == 0xFFFF {
tokenType = TokenEOF
}
atn.ruleToTokenType[i] = tokenType
}
}
atn.ruleToStopState = make([]*RuleStopState, nrules) //initIntArray(nrules, 0)
for i := 0; i < len(atn.states); i++ {
state := atn.states[i]
atn.ruleToStopState = make([]*RuleStopState, nrules)
for _, state := range atn.states {
if s2, ok := state.(*RuleStopState); ok {
atn.ruleToStopState[s2.ruleIndex] = s2
atn.ruleToStartState[s2.ruleIndex].stopState = s2
@@ -269,17 +184,25 @@ func (a *ATNDeserializer) readRules(atn *ATN) {
func (a *ATNDeserializer) readModes(atn *ATN) {
nmodes := a.readInt()
atn.modeToStartState = make([]*TokensStartState, nmodes)
for i := 0; i < nmodes; i++ {
for i := range atn.modeToStartState {
s := a.readInt()
atn.modeToStartState = append(atn.modeToStartState, atn.states[s].(*TokensStartState))
atn.modeToStartState[i] = atn.states[s].(*TokensStartState)
}
}
func (a *ATNDeserializer) readSets(atn *ATN, sets []*IntervalSet, readUnicode func() int) []*IntervalSet {
func (a *ATNDeserializer) readSets(atn *ATN, sets []*IntervalSet) []*IntervalSet {
m := a.readInt()
// Preallocate the needed capacity.
if cap(sets)-len(sets) < m {
isets := make([]*IntervalSet, len(sets), len(sets)+m)
copy(isets, sets)
sets = isets
}
for i := 0; i < m; i++ {
iset := NewIntervalSet()
@@ -293,8 +216,8 @@ func (a *ATNDeserializer) readSets(atn *ATN, sets []*IntervalSet, readUnicode fu
}
for j := 0; j < n; j++ {
i1 := readUnicode()
i2 := readUnicode()
i1 := a.readInt()
i2 := a.readInt()
iset.addRange(i1, i2)
}
@@ -322,11 +245,9 @@ func (a *ATNDeserializer) readEdges(atn *ATN, sets []*IntervalSet) {
}
// Edges for rule stop states can be derived, so they are not serialized
for i := 0; i < len(atn.states); i++ {
state := atn.states[i]
for j := 0; j < len(state.GetTransitions()); j++ {
var t, ok = state.GetTransitions()[j].(*RuleTransition)
for _, state := range atn.states {
for _, t := range state.GetTransitions() {
var rt, ok = t.(*RuleTransition)
if !ok {
continue
@@ -334,48 +255,42 @@ func (a *ATNDeserializer) readEdges(atn *ATN, sets []*IntervalSet) {
outermostPrecedenceReturn := -1
if atn.ruleToStartState[t.getTarget().GetRuleIndex()].isPrecedenceRule {
if t.precedence == 0 {
outermostPrecedenceReturn = t.getTarget().GetRuleIndex()
if atn.ruleToStartState[rt.getTarget().GetRuleIndex()].isPrecedenceRule {
if rt.precedence == 0 {
outermostPrecedenceReturn = rt.getTarget().GetRuleIndex()
}
}
trans := NewEpsilonTransition(t.followState, outermostPrecedenceReturn)
trans := NewEpsilonTransition(rt.followState, outermostPrecedenceReturn)
atn.ruleToStopState[t.getTarget().GetRuleIndex()].AddTransition(trans, -1)
atn.ruleToStopState[rt.getTarget().GetRuleIndex()].AddTransition(trans, -1)
}
}
for i := 0; i < len(atn.states); i++ {
state := atn.states[i]
if s2, ok := state.(*BaseBlockStartState); ok {
for _, state := range atn.states {
if s2, ok := state.(BlockStartState); ok {
// We need to know the end state to set its start state
if s2.endState == nil {
if s2.getEndState() == nil {
panic("IllegalState")
}
// Block end states can only be associated to a single block start state
if s2.endState.startState != nil {
if s2.getEndState().startState != nil {
panic("IllegalState")
}
s2.endState.startState = state
s2.getEndState().startState = state
}
if s2, ok := state.(*PlusLoopbackState); ok {
for j := 0; j < len(s2.GetTransitions()); j++ {
target := s2.GetTransitions()[j].getTarget()
if t2, ok := target.(*PlusBlockStartState); ok {
for _, t := range s2.GetTransitions() {
if t2, ok := t.getTarget().(*PlusBlockStartState); ok {
t2.loopBackState = state
}
}
} else if s2, ok := state.(*StarLoopbackState); ok {
for j := 0; j < len(s2.GetTransitions()); j++ {
target := s2.GetTransitions()[j].getTarget()
if t2, ok := target.(*StarLoopEntryState); ok {
for _, t := range s2.GetTransitions() {
if t2, ok := t.getTarget().(*StarLoopEntryState); ok {
t2.loopBackState = state
}
}
@@ -399,25 +314,13 @@ func (a *ATNDeserializer) readLexerActions(atn *ATN) {
if atn.grammarType == ATNTypeLexer {
count := a.readInt()
atn.lexerActions = make([]LexerAction, count) // initIntArray(count, nil)
atn.lexerActions = make([]LexerAction, count)
for i := 0; i < count; i++ {
for i := range atn.lexerActions {
actionType := a.readInt()
data1 := a.readInt()
if data1 == 0xFFFF {
data1 = -1
}
data2 := a.readInt()
if data2 == 0xFFFF {
data2 = -1
}
lexerAction := a.lexerActionFactory(actionType, data1, data2)
atn.lexerActions[i] = lexerAction
atn.lexerActions[i] = a.lexerActionFactory(actionType, data1, data2)
}
}
}
@@ -565,14 +468,12 @@ func (a *ATNDeserializer) markPrecedenceDecisions(atn *ATN) {
}
func (a *ATNDeserializer) verifyATN(atn *ATN) {
if !a.deserializationOptions.verifyATN {
if !a.options.VerifyATN() {
return
}
// Verify assumptions
for i := 0; i < len(atn.states); i++ {
state := atn.states[i]
for _, state := range atn.states {
if state == nil {
continue
}
@@ -587,18 +488,18 @@ func (a *ATNDeserializer) verifyATN(atn *ATN) {
a.checkCondition(s2.loopBackState != nil, "")
a.checkCondition(len(s2.GetTransitions()) == 2, "")
switch s2 := state.(type) {
switch s2.transitions[0].getTarget().(type) {
case *StarBlockStartState:
var _, ok2 = s2.GetTransitions()[1].getTarget().(*LoopEndState)
_, ok := s2.transitions[1].getTarget().(*LoopEndState)
a.checkCondition(ok2, "")
a.checkCondition(ok, "")
a.checkCondition(!s2.nonGreedy, "")
case *LoopEndState:
var s3, ok2 = s2.GetTransitions()[1].getTarget().(*StarBlockStartState)
var _, ok = s2.transitions[1].getTarget().(*StarBlockStartState)
a.checkCondition(ok2, "")
a.checkCondition(s3.nonGreedy, "")
a.checkCondition(ok, "")
a.checkCondition(s2.nonGreedy, "")
default:
panic("IllegalState")
@@ -607,9 +508,9 @@ func (a *ATNDeserializer) verifyATN(atn *ATN) {
case *StarLoopbackState:
a.checkCondition(len(state.GetTransitions()) == 1, "")
var _, ok2 = state.GetTransitions()[0].getTarget().(*StarLoopEntryState)
var _, ok = state.GetTransitions()[0].getTarget().(*StarLoopEntryState)
a.checkCondition(ok2, "")
a.checkCondition(ok, "")
case *LoopEndState:
a.checkCondition(s2.loopBackState != nil, "")
@@ -617,8 +518,8 @@ func (a *ATNDeserializer) verifyATN(atn *ATN) {
case *RuleStartState:
a.checkCondition(s2.stopState != nil, "")
case *BaseBlockStartState:
a.checkCondition(s2.endState != nil, "")
case BlockStartState:
a.checkCondition(s2.getEndState() != nil, "")
case *BlockEndState:
a.checkCondition(s2.startState != nil, "")
@@ -649,53 +550,7 @@ func (a *ATNDeserializer) readInt() int {
a.pos++
return int(v)
}
func (a *ATNDeserializer) readInt32() int {
var low = a.readInt()
var high = a.readInt()
return low | (high << 16)
}
//TODO
//func (a *ATNDeserializer) readLong() int64 {
// panic("Not implemented")
// var low = a.readInt32()
// var high = a.readInt32()
// return (low & 0x00000000FFFFFFFF) | (high << int32)
//}
func createByteToHex() []string {
bth := make([]string, 256)
for i := 0; i < 256; i++ {
bth[i] = strings.ToUpper(hex.EncodeToString([]byte{byte(i)}))
}
return bth
}
var byteToHex = createByteToHex()
func (a *ATNDeserializer) readUUID() string {
bb := make([]int, 16)
for i := 7; i >= 0; i-- {
integer := a.readInt()
bb[(2*i)+1] = integer & 0xFF
bb[2*i] = (integer >> 8) & 0xFF
}
return byteToHex[bb[0]] + byteToHex[bb[1]] +
byteToHex[bb[2]] + byteToHex[bb[3]] + "-" +
byteToHex[bb[4]] + byteToHex[bb[5]] + "-" +
byteToHex[bb[6]] + byteToHex[bb[7]] + "-" +
byteToHex[bb[8]] + byteToHex[bb[9]] + "-" +
byteToHex[bb[10]] + byteToHex[bb[11]] +
byteToHex[bb[12]] + byteToHex[bb[13]] +
byteToHex[bb[14]] + byteToHex[bb[15]]
return int(v) // data is 32 bits but int is at least that big
}
func (a *ATNDeserializer) edgeFactory(atn *ATN, typeIndex, src, trg, arg1, arg2, arg3 int, sets []*IntervalSet) Transition {
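With the UUID checks and DeserializeFromUInt16 removed, the deserializer now consumes the []int32 table that ANTLR 4.10-generated code provides. A sketch of the new entry point; the serialized data itself comes from generated parsers and is not reproduced here (import path assumed to be the vendored v4 runtime):

package main

import "github.com/antlr/antlr4/runtime/Go/antlr"

// loadATN shows the new Deserialize signature: generated parsers pass their
// []int32 serialization table directly; nil options selects the package defaults.
func loadATN(serializedATN []int32) *antlr.ATN {
	return antlr.NewATNDeserializer(nil).Deserialize(serializedATN)
}

func main() {
	_ = loadATN // the real table is emitted by the ANTLR tool, omitted here
}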

View File

@@ -243,6 +243,8 @@ func NewBasicBlockStartState() *BasicBlockStartState {
return &BasicBlockStartState{BaseBlockStartState: b}
}
var _ BlockStartState = &BasicBlockStartState{}
// BlockEndState is a terminal node of a simple (a|b|c) block.
type BlockEndState struct {
*BaseATNState
@@ -318,6 +320,8 @@ func NewPlusBlockStartState() *PlusBlockStartState {
return &PlusBlockStartState{BaseBlockStartState: b}
}
var _ BlockStartState = &PlusBlockStartState{}
// StarBlockStartState is the block that begins a closure loop.
type StarBlockStartState struct {
*BaseBlockStartState
@@ -331,6 +335,8 @@ func NewStarBlockStartState() *StarBlockStartState {
return &StarBlockStartState{BaseBlockStartState: b}
}
var _ BlockStartState = &StarBlockStartState{}
type StarLoopbackState struct {
*BaseATNState
}

View File

@@ -6,7 +6,6 @@ package antlr
import (
"sort"
"sync"
)
type DFA struct {
@@ -18,23 +17,27 @@ type DFA struct {
// states is all the DFA states. Use Map to get the old state back; Set can only
// indicate whether it is there.
states map[int]*DFAState
statesMu sync.RWMutex
s0 *DFAState
s0Mu sync.RWMutex
// precedenceDfa is the backing field for isPrecedenceDfa and setPrecedenceDfa.
// True if the DFA is for a precedence decision and false otherwise.
precedenceDfa bool
precedenceDfaMu sync.RWMutex
}
func NewDFA(atnStartState DecisionState, decision int) *DFA {
return &DFA{
dfa := &DFA{
atnStartState: atnStartState,
decision: decision,
states: make(map[int]*DFAState),
}
if s, ok := atnStartState.(*StarLoopEntryState); ok && s.precedenceRuleDecision {
dfa.precedenceDfa = true
dfa.s0 = NewDFAState(-1, NewBaseATNConfigSet(false))
dfa.s0.isAcceptState = false
dfa.s0.requiresFullContext = false
}
return dfa
}
// getPrecedenceStartState gets the start state for the current precedence and
@@ -79,8 +82,6 @@ func (d *DFA) setPrecedenceStartState(precedence int, startState *DFAState) {
}
func (d *DFA) getPrecedenceDfa() bool {
d.precedenceDfaMu.RLock()
defer d.precedenceDfaMu.RUnlock()
return d.precedenceDfa
}
@@ -104,46 +105,32 @@ func (d *DFA) setPrecedenceDfa(precedenceDfa bool) {
d.setS0(nil)
}
d.precedenceDfaMu.Lock()
defer d.precedenceDfaMu.Unlock()
d.precedenceDfa = precedenceDfa
}
}
func (d *DFA) getS0() *DFAState {
d.s0Mu.RLock()
defer d.s0Mu.RUnlock()
return d.s0
}
func (d *DFA) setS0(s *DFAState) {
d.s0Mu.Lock()
defer d.s0Mu.Unlock()
d.s0 = s
}
func (d *DFA) getState(hash int) (*DFAState, bool) {
d.statesMu.RLock()
defer d.statesMu.RUnlock()
s, ok := d.states[hash]
return s, ok
}
func (d *DFA) setStates(states map[int]*DFAState) {
d.statesMu.Lock()
defer d.statesMu.Unlock()
d.states = states
}
func (d *DFA) setState(hash int, state *DFAState) {
d.statesMu.Lock()
defer d.statesMu.Unlock()
d.states[hash] = state
}
func (d *DFA) numStates() int {
d.statesMu.RLock()
defer d.statesMu.RUnlock()
return len(d.states)
}
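The per-DFA mutexes (statesMu, s0Mu, precedenceDfaMu) are gone; as the simulator hunks below show, callers now serialize DFA access through the shared ATN-level stateMu/edgeMu. A compact illustration of that convention with stand-in types, not the ANTLR ones:

package main

import (
	"fmt"
	"sync"
)

// Stand-ins: the DFA is plain data, and one ATN-level RWMutex guards
// every read and write of its state table.
type dfaState struct{ stateNumber int }

type dfa struct{ states map[int]*dfaState }

type atn struct{ stateMu sync.RWMutex }

func (a *atn) getState(d *dfa, hash int) (*dfaState, bool) {
	a.stateMu.RLock()
	defer a.stateMu.RUnlock()
	s, ok := d.states[hash]
	return s, ok
}

func (a *atn) addState(d *dfa, hash int, s *dfaState) *dfaState {
	a.stateMu.Lock()
	defer a.stateMu.Unlock()
	if existing, ok := d.states[hash]; ok {
		return existing // reuse the state another goroutine already added
	}
	s.stateNumber = len(d.states)
	d.states[hash] = s
	return s
}

func main() {
	a, d := &atn{}, &dfa{states: map[int]*dfaState{}}
	added := a.addState(d, 42, &dfaState{})
	got, ok := a.getState(d, 42)
	fmt.Println(added == got, ok) // true true
}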

View File

@@ -6,7 +6,6 @@ package antlr
import (
"fmt"
"sync"
)
// PredPrediction maps a predicate to a predicted alternative.
@@ -50,8 +49,7 @@ type DFAState struct {
// edges elements point to the target of the symbol. Shift up by 1 so (-1)
// Token.EOF maps to the first element.
edges []*DFAState
edgesMu sync.RWMutex
edges []*DFAState
isAcceptState bool
@@ -93,7 +91,7 @@ func NewDFAState(stateNumber int, configs ATNConfigSet) *DFAState {
// GetAltSet gets the set of all alts mentioned by all ATN configurations in d.
func (d *DFAState) GetAltSet() Set {
alts := NewArray2DHashSet(nil, nil)
alts := newArray2DHashSet(nil, nil)
if d.configs != nil {
for _, c := range d.configs.GetItems() {
@@ -109,32 +107,22 @@ func (d *DFAState) GetAltSet() Set {
}
func (d *DFAState) getEdges() []*DFAState {
d.edgesMu.RLock()
defer d.edgesMu.RUnlock()
return d.edges
}
func (d *DFAState) numEdges() int {
d.edgesMu.RLock()
defer d.edgesMu.RUnlock()
return len(d.edges)
}
func (d *DFAState) getIthEdge(i int) *DFAState {
d.edgesMu.RLock()
defer d.edgesMu.RUnlock()
return d.edges[i]
}
func (d *DFAState) setEdges(newEdges []*DFAState) {
d.edgesMu.Lock()
defer d.edgesMu.Unlock()
d.edges = newEdges
}
func (d *DFAState) setIthEdge(i int, edge *DFAState) {
d.edgesMu.Lock()
defer d.edgesMu.Unlock()
d.edges[i] = edge
}

View File

@@ -16,7 +16,7 @@ type ErrorStrategy interface {
RecoverInline(Parser) Token
Recover(Parser, RecognitionException)
Sync(Parser)
inErrorRecoveryMode(Parser) bool
InErrorRecoveryMode(Parser) bool
ReportError(Parser, RecognitionException)
ReportMatch(Parser)
}
@@ -40,7 +40,7 @@ func NewDefaultErrorStrategy() *DefaultErrorStrategy {
// error". This is used to suppress Reporting multiple error messages while
// attempting to recover from a detected syntax error.
//
// @see //inErrorRecoveryMode
// @see //InErrorRecoveryMode
//
d.errorRecoveryMode = false
@@ -71,7 +71,7 @@ func (d *DefaultErrorStrategy) beginErrorCondition(recognizer Parser) {
d.errorRecoveryMode = true
}
func (d *DefaultErrorStrategy) inErrorRecoveryMode(recognizer Parser) bool {
func (d *DefaultErrorStrategy) InErrorRecoveryMode(recognizer Parser) bool {
return d.errorRecoveryMode
}
@@ -118,7 +118,7 @@ func (d *DefaultErrorStrategy) ReportMatch(recognizer Parser) {
func (d *DefaultErrorStrategy) ReportError(recognizer Parser, e RecognitionException) {
// if we've already Reported an error and have not Matched a token
// yet successfully, don't Report any errors.
if d.inErrorRecoveryMode(recognizer) {
if d.InErrorRecoveryMode(recognizer) {
return // don't Report spurious errors
}
d.beginErrorCondition(recognizer)
@@ -209,7 +209,7 @@ func (d *DefaultErrorStrategy) Recover(recognizer Parser, e RecognitionException
//
func (d *DefaultErrorStrategy) Sync(recognizer Parser) {
// If already recovering, don't try to Sync
if d.inErrorRecoveryMode(recognizer) {
if d.InErrorRecoveryMode(recognizer) {
return
}
@@ -312,7 +312,7 @@ func (d *DefaultErrorStrategy) ReportFailedPredicate(recognizer Parser, e *Faile
// @param recognizer the parser instance
//
func (d *DefaultErrorStrategy) ReportUnwantedToken(recognizer Parser) {
if d.inErrorRecoveryMode(recognizer) {
if d.InErrorRecoveryMode(recognizer) {
return
}
d.beginErrorCondition(recognizer)
@@ -341,7 +341,7 @@ func (d *DefaultErrorStrategy) ReportUnwantedToken(recognizer Parser) {
// @param recognizer the parser instance
//
func (d *DefaultErrorStrategy) ReportMissingToken(recognizer Parser) {
if d.inErrorRecoveryMode(recognizer) {
if d.InErrorRecoveryMode(recognizer) {
return
}
d.beginErrorCondition(recognizer)
@@ -738,7 +738,11 @@ func (b *BailErrorStrategy) Recover(recognizer Parser, e RecognitionException) {
context := recognizer.GetParserRuleContext()
for context != nil {
context.SetException(e)
context = context.GetParent().(ParserRuleContext)
if parent, ok := context.GetParent().(ParserRuleContext); ok {
context = parent
} else {
context = nil
}
}
panic(NewParseCancellationException()) // TODO we don't emit e properly
}

View File

@@ -91,11 +91,16 @@ func (l *LexerATNSimulator) Match(input CharStream, mode int) int {
dfa := l.decisionToDFA[mode]
if dfa.getS0() == nil {
var s0 *DFAState
l.atn.stateMu.RLock()
s0 = dfa.getS0()
l.atn.stateMu.RUnlock()
if s0 == nil {
return l.MatchATN(input)
}
return l.execATN(input, dfa.getS0())
return l.execATN(input, s0)
}
func (l *LexerATNSimulator) reset() {
@@ -117,11 +122,7 @@ func (l *LexerATNSimulator) MatchATN(input CharStream) int {
suppressEdge := s0Closure.hasSemanticContext
s0Closure.hasSemanticContext = false
next := l.addDFAState(s0Closure)
if !suppressEdge {
l.decisionToDFA[l.mode].setS0(next)
}
next := l.addDFAState(s0Closure, suppressEdge)
predict := l.execATN(input, next)
@@ -203,10 +204,15 @@ func (l *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
// {@code t}, or {@code nil} if the target state for l edge is not
// already cached
func (l *LexerATNSimulator) getExistingTargetState(s *DFAState, t int) *DFAState {
if s.getEdges() == nil || t < LexerATNSimulatorMinDFAEdge || t > LexerATNSimulatorMaxDFAEdge {
if t < LexerATNSimulatorMinDFAEdge || t > LexerATNSimulatorMaxDFAEdge {
return nil
}
l.atn.edgeMu.RLock()
defer l.atn.edgeMu.RUnlock()
if s.getEdges() == nil {
return nil
}
target := s.getIthEdge(t - LexerATNSimulatorMinDFAEdge)
if LexerATNSimulatorDebug && target != nil {
fmt.Println("reuse state " + strconv.Itoa(s.stateNumber) + " edge to " + strconv.Itoa(target.stateNumber))
@@ -537,7 +543,7 @@ func (l *LexerATNSimulator) addDFAEdge(from *DFAState, tk int, to *DFAState, cfg
suppressEdge := cfgs.HasSemanticContext()
cfgs.SetHasSemanticContext(false)
to = l.addDFAState(cfgs)
to = l.addDFAState(cfgs, true)
if suppressEdge {
return to
@@ -551,6 +557,8 @@ func (l *LexerATNSimulator) addDFAEdge(from *DFAState, tk int, to *DFAState, cfg
if LexerATNSimulatorDebug {
fmt.Println("EDGE " + from.String() + " -> " + to.String() + " upon " + strconv.Itoa(tk))
}
l.atn.edgeMu.Lock()
defer l.atn.edgeMu.Unlock()
if from.getEdges() == nil {
// make room for tokens 1..n and -1 masquerading as index 0
from.setEdges(make([]*DFAState, LexerATNSimulatorMaxDFAEdge-LexerATNSimulatorMinDFAEdge+1))
@@ -564,7 +572,7 @@ func (l *LexerATNSimulator) addDFAEdge(from *DFAState, tk int, to *DFAState, cfg
// configurations already. This method also detects the first
// configuration containing an ATN rule stop state. Later, when
// traversing the DFA, we will know which rule to accept.
func (l *LexerATNSimulator) addDFAState(configs ATNConfigSet) *DFAState {
func (l *LexerATNSimulator) addDFAState(configs ATNConfigSet, suppressEdge bool) *DFAState {
proposed := NewDFAState(-1, configs)
var firstConfigWithRuleStopState ATNConfig
@@ -585,16 +593,22 @@ func (l *LexerATNSimulator) addDFAState(configs ATNConfigSet) *DFAState {
}
hash := proposed.hash()
dfa := l.decisionToDFA[l.mode]
l.atn.stateMu.Lock()
defer l.atn.stateMu.Unlock()
existing, ok := dfa.getState(hash)
if ok {
return existing
proposed = existing
} else {
proposed.stateNumber = dfa.numStates()
configs.SetReadOnly(true)
proposed.configs = configs
dfa.setState(hash, proposed)
}
newState := proposed
newState.stateNumber = dfa.numStates()
configs.SetReadOnly(true)
newState.configs = configs
dfa.setState(hash, newState)
return newState
if !suppressEdge {
dfa.setS0(proposed)
}
return proposed
}
func (l *LexerATNSimulator) getDFA(mode int) *DFA {

View File

@@ -38,7 +38,7 @@ func (la *LL1Analyzer) getDecisionLookahead(s ATNState) []*IntervalSet {
look := make([]*IntervalSet, count)
for alt := 0; alt < count; alt++ {
look[alt] = NewIntervalSet()
lookBusy := NewArray2DHashSet(nil, nil)
lookBusy := newArray2DHashSet(nil, nil)
seeThruPreds := false // fail to get lookahead upon pred
la.look1(s.GetTransitions()[alt].getTarget(), nil, BasePredictionContextEMPTY, look[alt], lookBusy, NewBitSet(), seeThruPreds, false)
// Wipe out lookahead for la alternative if we found nothing
@@ -75,7 +75,7 @@ func (la *LL1Analyzer) Look(s, stopState ATNState, ctx RuleContext) *IntervalSet
if ctx != nil {
lookContext = predictionContextFromRuleContext(s.GetATN(), ctx)
}
la.look1(s, stopState, lookContext, r, NewArray2DHashSet(nil, nil), NewBitSet(), seeThruPreds, true)
la.look1(s, stopState, lookContext, r, newArray2DHashSet(nil, nil), NewBitSet(), seeThruPreds, true)
return r
}

View File

@@ -425,7 +425,7 @@ func (p *BaseParser) Consume() Token {
}
hasListener := p.parseListeners != nil && len(p.parseListeners) > 0
if p.BuildParseTrees || hasListener {
if p.errHandler.inErrorRecoveryMode(p) {
if p.errHandler.InErrorRecoveryMode(p) {
node := p.ctx.AddErrorNode(o)
if p.parseListeners != nil {
for _, l := range p.parseListeners {

View File

@@ -96,14 +96,18 @@ func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, ou
// Now we are certain to have a specific decision's DFA
// But, do we still need an initial state?
var s0 *DFAState
p.atn.stateMu.RLock()
if dfa.getPrecedenceDfa() {
p.atn.edgeMu.RLock()
// the start state for a precedence DFA depends on the current
// parser precedence, and is provided by a DFA method.
s0 = dfa.getPrecedenceStartState(p.parser.GetPrecedence())
p.atn.edgeMu.RUnlock()
} else {
// the start state for a "regular" DFA is just s0
s0 = dfa.getS0()
}
p.atn.stateMu.RUnlock()
if s0 == nil {
if outerContext == nil {
@@ -114,21 +118,10 @@ func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, ou
" exec LA(1)==" + p.getLookaheadName(input) +
", outerContext=" + outerContext.String(p.parser.GetRuleNames(), nil))
}
// If p is not a precedence DFA, we check the ATN start state
// to determine if p ATN start state is the decision for the
// closure block that determines whether a precedence rule
// should continue or complete.
t2 := dfa.atnStartState
t, ok := t2.(*StarLoopEntryState)
if !dfa.getPrecedenceDfa() && ok {
if t.precedenceRuleDecision {
dfa.setPrecedenceDfa(true)
}
}
fullCtx := false
s0Closure := p.computeStartState(dfa.atnStartState, RuleContextEmpty, fullCtx)
p.atn.stateMu.Lock()
if dfa.getPrecedenceDfa() {
// If p is a precedence DFA, we use applyPrecedenceFilter
// to convert the computed start state to a precedence start
@@ -139,12 +132,16 @@ func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, ou
dfa.s0.configs = s0Closure
s0Closure = p.applyPrecedenceFilter(s0Closure)
s0 = p.addDFAState(dfa, NewDFAState(-1, s0Closure))
p.atn.edgeMu.Lock()
dfa.setPrecedenceStartState(p.parser.GetPrecedence(), s0)
p.atn.edgeMu.Unlock()
} else {
s0 = p.addDFAState(dfa, NewDFAState(-1, s0Closure))
dfa.setS0(s0)
}
p.atn.stateMu.Unlock()
}
alt := p.execATN(dfa, s0, input, index, outerContext)
if ParserATNSimulatorDebug {
fmt.Println("DFA after predictATN: " + dfa.String(p.parser.GetLiteralNames(), nil))
@@ -295,11 +292,16 @@ func (p *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStream,
// already cached
func (p *ParserATNSimulator) getExistingTargetState(previousD *DFAState, t int) *DFAState {
edges := previousD.getEdges()
if edges == nil || t+1 < 0 || t+1 >= len(edges) {
if t+1 < 0 {
return nil
}
p.atn.edgeMu.RLock()
defer p.atn.edgeMu.RUnlock()
edges := previousD.getEdges()
if edges == nil || t+1 >= len(edges) {
return nil
}
return previousD.getIthEdge(t + 1)
}
@@ -568,7 +570,7 @@ func (p *ParserATNSimulator) computeReachSet(closure ATNConfigSet, t int, fullCt
//
if reach == nil {
reach = NewBaseATNConfigSet(fullCtx)
closureBusy := NewArray2DHashSet(nil, nil)
closureBusy := newArray2DHashSet(nil, nil)
treatEOFAsEpsilon := t == TokenEOF
amount := len(intermediate.configs)
for k := 0; k < amount; k++ {
@@ -663,7 +665,7 @@ func (p *ParserATNSimulator) computeStartState(a ATNState, ctx RuleContext, full
for i := 0; i < len(a.GetTransitions()); i++ {
target := a.GetTransitions()[i].getTarget()
c := NewBaseATNConfig6(target, i+1, initialContext)
closureBusy := NewArray2DHashSet(nil, nil)
closureBusy := newArray2DHashSet(nil, nil)
p.closure(c, configs, closureBusy, true, fullCtx, false)
}
return configs
@@ -1446,14 +1448,18 @@ func (p *ParserATNSimulator) addDFAEdge(dfa *DFA, from *DFAState, t int, to *DFA
if to == nil {
return nil
}
p.atn.stateMu.Lock()
to = p.addDFAState(dfa, to) // used existing if possible not incoming
p.atn.stateMu.Unlock()
if from == nil || t < -1 || t > p.atn.maxTokenType {
return to
}
p.atn.edgeMu.Lock()
if from.getEdges() == nil {
from.setEdges(make([]*DFAState, p.atn.maxTokenType+1+1))
}
from.setIthEdge(t+1, to) // connect
p.atn.edgeMu.Unlock()
if ParserATNSimulatorDebug {
var names []string

View File

@@ -49,7 +49,7 @@ var tokenTypeMapCache = make(map[string]int)
var ruleIndexMapCache = make(map[string]int)
func (b *BaseRecognizer) checkVersion(toolVersion string) {
runtimeVersion := "4.9.3"
runtimeVersion := "4.10.1"
if runtimeVersion != toolVersion {
fmt.Println("ANTLR runtime and generated code versions disagree: " + runtimeVersion + "!=" + toolVersion)
}

View File

@@ -193,7 +193,7 @@ type AND struct {
func NewAND(a, b SemanticContext) *AND {
operands := NewArray2DHashSet(nil, nil)
operands := newArray2DHashSet(nil, nil)
if aa, ok := a.(*AND); ok {
for _, o := range aa.opnds {
operands.Add(o)
@@ -345,7 +345,7 @@ type OR struct {
func NewOR(a, b SemanticContext) *OR {
operands := NewArray2DHashSet(nil, nil)
operands := newArray2DHashSet(nil, nil)
if aa, ok := a.(*OR); ok {
for _, o := range aa.opnds {
operands.Add(o)

View File

@@ -64,7 +64,7 @@ type BaseParseTreeVisitor struct{}
var _ ParseTreeVisitor = &BaseParseTreeVisitor{}
func (v *BaseParseTreeVisitor) Visit(tree ParseTree) interface{} { return nil }
func (v *BaseParseTreeVisitor) Visit(tree ParseTree) interface{} { return tree.Accept(v) }
func (v *BaseParseTreeVisitor) VisitChildren(node RuleNode) interface{} { return nil }
func (v *BaseParseTreeVisitor) VisitTerminal(node TerminalNode) interface{} { return nil }
func (v *BaseParseTreeVisitor) VisitErrorNode(node ErrorNode) interface{} { return nil }

View File

@@ -8,7 +8,7 @@ import (
"bytes"
"errors"
"fmt"
"sort"
"math/bits"
"strconv"
"strings"
)
@@ -71,59 +71,92 @@ type hasher interface {
hash() int
}
const bitsPerWord = 64
func indexForBit(bit int) int {
return bit / bitsPerWord
}
func wordForBit(data []uint64, bit int) uint64 {
idx := indexForBit(bit)
if idx >= len(data) {
return 0
}
return data[idx]
}
func maskForBit(bit int) uint64 {
return uint64(1) << (bit % bitsPerWord)
}
func wordsNeeded(bit int) int {
return indexForBit(bit) + 1
}
type BitSet struct {
data map[int]bool
data []uint64
}
func NewBitSet() *BitSet {
b := new(BitSet)
b.data = make(map[int]bool)
return b
return &BitSet{}
}
func (b *BitSet) add(value int) {
b.data[value] = true
idx := indexForBit(value)
if idx >= len(b.data) {
size := wordsNeeded(value)
data := make([]uint64, size)
copy(data, b.data)
b.data = data
}
b.data[idx] |= maskForBit(value)
}
func (b *BitSet) clear(index int) {
delete(b.data, index)
idx := indexForBit(index)
if idx >= len(b.data) {
return
}
b.data[idx] &= ^maskForBit(index)
}
func (b *BitSet) or(set *BitSet) {
for k := range set.data {
b.add(k)
// Get min size necessary to represent the bits in both sets.
bLen := b.minLen()
setLen := set.minLen()
maxLen := intMax(bLen, setLen)
if maxLen > len(b.data) {
// Increase the size of len(b.data) to represent the bits in both sets.
data := make([]uint64, maxLen)
copy(data, b.data)
b.data = data
}
// len(b.data) is at least setLen.
for i := 0; i < setLen; i++ {
b.data[i] |= set.data[i]
}
}
func (b *BitSet) remove(value int) {
delete(b.data, value)
b.clear(value)
}
func (b *BitSet) contains(value int) bool {
return b.data[value]
}
func (b *BitSet) values() []int {
ks := make([]int, len(b.data))
i := 0
for k := range b.data {
ks[i] = k
i++
idx := indexForBit(value)
if idx >= len(b.data) {
return false
}
sort.Ints(ks)
return ks
return (b.data[idx] & maskForBit(value)) != 0
}
func (b *BitSet) minValue() int {
min := 2147483647
for k := range b.data {
if k < min {
min = k
for i, v := range b.data {
if v == 0 {
continue
}
return i*bitsPerWord + bits.TrailingZeros64(v)
}
return min
return 2147483647
}
func (b *BitSet) equals(other interface{}) bool {
@@ -132,12 +165,22 @@ func (b *BitSet) equals(other interface{}) bool {
return false
}
if len(b.data) != len(otherBitSet.data) {
if b == otherBitSet {
return true
}
// We only compare set bits, so we cannot rely on the two slices having the same size. It's
// possible for two BitSets to have different slice lengths but the same set bits. So we only
// compare the relevant words and ignore the trailing zeros.
bLen := b.minLen()
otherLen := otherBitSet.minLen()
if bLen != otherLen {
return false
}
for k, v := range b.data {
if otherBitSet.data[k] != v {
for i := 0; i < bLen; i++ {
if b.data[i] != otherBitSet.data[i] {
return false
}
}
@@ -145,18 +188,35 @@ func (b *BitSet) equals(other interface{}) bool {
return true
}
func (b *BitSet) minLen() int {
for i := len(b.data); i > 0; i-- {
if b.data[i-1] != 0 {
return i
}
}
return 0
}
func (b *BitSet) length() int {
return len(b.data)
cnt := 0
for _, val := range b.data {
cnt += bits.OnesCount64(val)
}
return cnt
}
func (b *BitSet) String() string {
vals := b.values()
valsS := make([]string, len(vals))
vals := make([]string, 0, b.length())
for i, val := range vals {
valsS[i] = strconv.Itoa(val)
for i, v := range b.data {
for v != 0 {
n := bits.TrailingZeros64(v)
vals = append(vals, strconv.Itoa(i*bitsPerWord+n))
v &= ^(uint64(1) << n)
}
}
return "{" + strings.Join(valsS, ", ") + "}"
return "{" + strings.Join(vals, ", ") + "}"
}
type AltDict struct {
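The BitSet now stores bits in a []uint64 word array instead of a map: bit n lives in word n/64 under mask 1<<(n%64), and helpers such as minValue scan for the first non-zero word. A tiny standalone demo of that indexing arithmetic (not the antlr type itself):

package main

import (
	"fmt"
	"math/bits"
)

const bitsPerWord = 64

func main() {
	// Setting bit 70: it lives in word 70/64 = 1, at mask 1<<(70%64) = 1<<6.
	data := make([]uint64, 2)
	bit := 70
	data[bit/bitsPerWord] |= 1 << (bit % bitsPerWord)

	// contains(70): test the same word and mask.
	fmt.Println(data[bit/bitsPerWord]&(1<<(bit%bitsPerWord)) != 0) // true

	// minValue-style scan: first non-zero word, then its lowest set bit.
	for i, w := range data {
		if w != 0 {
			fmt.Println(i*bitsPerWord + bits.TrailingZeros64(w)) // 70
			break
		}
	}
}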

View File

@@ -8,7 +8,7 @@ const (
_loadFactor = 0.75
)
var _ Set = (*Array2DHashSet)(nil)
var _ Set = (*array2DHashSet)(nil)
type Set interface {
Add(value interface{}) (added interface{})
@@ -19,7 +19,7 @@ type Set interface {
Each(f func(interface{}) bool)
}
type Array2DHashSet struct {
type array2DHashSet struct {
buckets [][]interface{}
hashcodeFunction func(interface{}) int
equalsFunction func(interface{}, interface{}) bool
@@ -31,7 +31,7 @@ type Array2DHashSet struct {
initialBucketCapacity int
}
func (as *Array2DHashSet) Each(f func(interface{}) bool) {
func (as *array2DHashSet) Each(f func(interface{}) bool) {
if as.Len() < 1 {
return
}
@@ -48,7 +48,7 @@ func (as *Array2DHashSet) Each(f func(interface{}) bool) {
}
}
func (as *Array2DHashSet) Values() []interface{} {
func (as *array2DHashSet) Values() []interface{} {
if as.Len() < 1 {
return nil
}
@@ -61,18 +61,18 @@ func (as *Array2DHashSet) Values() []interface{} {
return values
}
func (as *Array2DHashSet) Contains(value interface{}) bool {
func (as *array2DHashSet) Contains(value interface{}) bool {
return as.Get(value) != nil
}
func (as *Array2DHashSet) Add(value interface{}) interface{} {
func (as *array2DHashSet) Add(value interface{}) interface{} {
if as.n > as.threshold {
as.expand()
}
return as.innerAdd(value)
}
func (as *Array2DHashSet) expand() {
func (as *array2DHashSet) expand() {
old := as.buckets
as.currentPrime += 4
@@ -120,11 +120,11 @@ func (as *Array2DHashSet) expand() {
}
}
func (as *Array2DHashSet) Len() int {
func (as *array2DHashSet) Len() int {
return as.n
}
func (as *Array2DHashSet) Get(o interface{}) interface{} {
func (as *array2DHashSet) Get(o interface{}) interface{} {
if o == nil {
return nil
}
@@ -147,7 +147,7 @@ func (as *Array2DHashSet) Get(o interface{}) interface{} {
return nil
}
func (as *Array2DHashSet) innerAdd(o interface{}) interface{} {
func (as *array2DHashSet) innerAdd(o interface{}) interface{} {
b := as.getBuckets(o)
bucket := as.buckets[b]
@@ -187,25 +187,25 @@ func (as *Array2DHashSet) innerAdd(o interface{}) interface{} {
return o
}
func (as *Array2DHashSet) getBuckets(value interface{}) int {
func (as *array2DHashSet) getBuckets(value interface{}) int {
hash := as.hashcodeFunction(value)
return hash & (len(as.buckets) - 1)
}
func (as *Array2DHashSet) createBuckets(cap int) [][]interface{} {
func (as *array2DHashSet) createBuckets(cap int) [][]interface{} {
return make([][]interface{}, cap)
}
func (as *Array2DHashSet) createBucket(cap int) []interface{} {
func (as *array2DHashSet) createBucket(cap int) []interface{} {
return make([]interface{}, cap)
}
func NewArray2DHashSetWithCap(
func newArray2DHashSetWithCap(
hashcodeFunction func(interface{}) int,
equalsFunction func(interface{}, interface{}) bool,
initCap int,
initBucketCap int,
) *Array2DHashSet {
) *array2DHashSet {
if hashcodeFunction == nil {
hashcodeFunction = standardHashFunction
}
@@ -214,7 +214,7 @@ func NewArray2DHashSetWithCap(
equalsFunction = standardEqualsFunction
}
ret := &Array2DHashSet{
ret := &array2DHashSet{
hashcodeFunction: hashcodeFunction,
equalsFunction: equalsFunction,
@@ -229,9 +229,9 @@ func NewArray2DHashSetWithCap(
return ret
}
func NewArray2DHashSet(
func newArray2DHashSet(
hashcodeFunction func(interface{}) int,
equalsFunction func(interface{}, interface{}) bool,
) *Array2DHashSet {
return NewArray2DHashSetWithCap(hashcodeFunction, equalsFunction, _initalCapacity, _initalBucketCapacity)
) *array2DHashSet {
return newArray2DHashSetWithCap(hashcodeFunction, equalsFunction, _initalCapacity, _initalBucketCapacity)
}

View File

@@ -8,9 +8,11 @@ go_library(
name = "go_default_library",
srcs = [
"cel.go",
"decls.go",
"env.go",
"io.go",
"library.go",
"macro.go",
"options.go",
"program.go",
],
@@ -21,6 +23,7 @@ go_library(
"//checker/decls:go_default_library",
"//common:go_default_library",
"//common/containers:go_default_library",
"//common/overloads:go_default_library",
"//common/types:go_default_library",
"//common/types/pb:go_default_library",
"//common/types/ref:go_default_library",
@@ -46,6 +49,7 @@ go_test(
srcs = [
"cel_example_test.go",
"cel_test.go",
"decls_test.go",
"env_test.go",
"io_test.go",
],
@@ -56,13 +60,11 @@ go_test(
":go_default_library",
],
deps = [
"//checker/decls:go_default_library",
"//common/operators:go_default_library",
"//common/overloads:go_default_library",
"//common/types:go_default_library",
"//common/types/ref:go_default_library",
"//common/types/traits:go_default_library",
"//interpreter/functions:go_default_library",
"//test:go_default_library",
"//test/proto2pb:go_default_library",
"//test/proto3pb:go_default_library",

vendor/github.com/google/cel-go/cel/decls.go (generated, vendored, new file, 1133 lines): diff suppressed because it is too large.
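decls.go is where the new Type values (IntType, DurationType, ...) and the Function/MemberOverload/UnaryBinding option builders live; library.go below uses them to register the UTC time overloads. A hedged usage sketch limited to builders that appear elsewhere in this PR; the function name hoursTwice and its overload ID are illustrative, not part of the library:

package main

import (
	"fmt"

	"github.com/google/cel-go/cel"
	"github.com/google/cel-go/common/types"
	"github.com/google/cel-go/common/types/ref"
)

func main() {
	// Declare a member function duration.hoursTwice() -> int with the new
	// builder API introduced by decls.go.
	env, err := cel.NewEnv(
		cel.Function("hoursTwice",
			cel.MemberOverload("duration_hours_twice",
				[]*cel.Type{cel.DurationType}, cel.IntType,
				cel.UnaryBinding(func(d ref.Val) ref.Val {
					return types.Int(d.(types.Duration).Hours() * 2)
				}))),
	)
	if err != nil {
		panic(err)
	}
	ast, iss := env.Compile(`duration("2h").hoursTwice()`)
	if iss != nil && iss.Err() != nil {
		panic(iss.Err())
	}
	prg, err := env.Program(ast)
	if err != nil {
		panic(err)
	}
	out, _, err := prg.Eval(cel.NoVars())
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // 4
}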

View File

@@ -61,11 +61,23 @@ func (ast *Ast) SourceInfo() *exprpb.SourceInfo {
// ResultType returns the output type of the expression if the Ast has been type-checked, else
// returns decls.Dyn as the parse step cannot infer the type.
//
// Deprecated: use OutputType
func (ast *Ast) ResultType() *exprpb.Type {
if !ast.IsChecked() {
return decls.Dyn
}
return ast.typeMap[ast.expr.Id]
return ast.typeMap[ast.expr.GetId()]
}
// OutputType returns the output type of the expression if the Ast has been type-checked, else
// returns cel.DynType as the parse step cannot infer types.
func (ast *Ast) OutputType() *Type {
t, err := ExprTypeToType(ast.ResultType())
if err != nil {
return DynType
}
return t
}
// Source returns a view of the input used to create the Ast. This source may be complete or
@@ -82,12 +94,14 @@ func FormatType(t *exprpb.Type) string {
// Env encapsulates the context necessary to perform parsing, type checking, or generation of
// evaluable programs for different expressions.
type Env struct {
Container *containers.Container
declarations []*exprpb.Decl
macros []parser.Macro
adapter ref.TypeAdapter
provider ref.TypeProvider
features map[int]bool
Container *containers.Container
functions map[string]*functionDecl
declarations []*exprpb.Decl
macros []parser.Macro
adapter ref.TypeAdapter
provider ref.TypeProvider
features map[int]bool
appliedFeatures map[int]bool
// Internal parser representation
prsr *parser.Parser
@@ -137,13 +151,15 @@ func NewCustomEnv(opts ...EnvOption) (*Env, error) {
return nil, err
}
return (&Env{
declarations: []*exprpb.Decl{},
macros: []parser.Macro{},
Container: containers.DefaultContainer,
adapter: registry,
provider: registry,
features: map[int]bool{},
progOpts: []ProgramOption{},
declarations: []*exprpb.Decl{},
functions: map[string]*functionDecl{},
macros: []parser.Macro{},
Container: containers.DefaultContainer,
adapter: registry,
provider: registry,
features: map[int]bool{},
appliedFeatures: map[int]bool{},
progOpts: []ProgramOption{},
}).configure(opts)
}
@@ -280,16 +296,27 @@ func (e *Env) Extend(opts ...EnvOption) (*Env, error) {
for k, v := range e.features {
featuresCopy[k] = v
}
appliedFeaturesCopy := make(map[int]bool, len(e.appliedFeatures))
for k, v := range e.appliedFeatures {
appliedFeaturesCopy[k] = v
}
funcsCopy := make(map[string]*functionDecl, len(e.functions))
for k, v := range e.functions {
funcsCopy[k] = v
}
// TODO: functions copy needs to happen here.
ext := &Env{
Container: e.Container,
declarations: decsCopy,
macros: macsCopy,
progOpts: progOptsCopy,
adapter: adapter,
features: featuresCopy,
provider: provider,
chkOpts: chkOptsCopy,
Container: e.Container,
declarations: decsCopy,
functions: funcsCopy,
macros: macsCopy,
progOpts: progOptsCopy,
adapter: adapter,
features: featuresCopy,
appliedFeatures: appliedFeaturesCopy,
provider: provider,
chkOpts: chkOptsCopy,
}
return ext.configure(opts)
}
@@ -436,6 +463,28 @@ func (e *Env) configure(opts []EnvOption) (*Env, error) {
}
}
// If the default UTC timezone fix has been enabled, make sure the library is configured
if e.HasFeature(featureDefaultUTCTimeZone) {
if _, found := e.appliedFeatures[featureDefaultUTCTimeZone]; !found {
e, err = Lib(timeUTCLibrary{})(e)
if err != nil {
return nil, err
}
// record that the feature has been applied since it will generate declarations
// and functions which will be propagated on Extend() calls and which should only
// be registered once.
e.appliedFeatures[featureDefaultUTCTimeZone] = true
}
}
// Initialize all of the functions configured within the environment.
for _, fn := range e.functions {
err = fn.init()
if err != nil {
return nil, err
}
}
// Configure the parser.
prsrOpts := []parser.Option{parser.Macros(e.macros...)}
if e.HasFeature(featureEnableMacroCallTracking) {
@@ -446,8 +495,7 @@ func (e *Env) configure(opts []EnvOption) (*Env, error) {
return nil, err
}
// The simplest way to eagerly validate declarations on environment creation is to compile
// a dummy program and check for the presence of e.chkErr being non-nil.
// Ensure that the checker init happens eagerly rather than lazily.
if e.HasFeature(featureEagerlyValidateDeclarations) {
err := e.initChecker()
if err != nil {
@@ -473,11 +521,26 @@ func (e *Env) initChecker() error {
e.chkErr = err
return
}
// Add the statically configured declarations.
err = ce.Add(e.declarations...)
if err != nil {
e.chkErr = err
return
}
// Add the function declarations which are derived from the FunctionDecl instances.
for _, fn := range e.functions {
fnDecl, err := functionDeclToExprDecl(fn)
if err != nil {
e.chkErr = err
return
}
err = ce.Add(fnDecl)
if err != nil {
e.chkErr = err
return
}
}
// Add function declarations here separately.
e.chk = ce
})
return e.chkErr

View File

@@ -241,7 +241,7 @@ func ValueToRefValue(adapter ref.TypeAdapter, v *exprpb.Value) (ref.Val, error)
if err != nil {
return nil, err
}
return adapter.NativeToValue(msg.(proto.Message)), nil
return adapter.NativeToValue(msg), nil
case *exprpb.Value_MapValue:
m := v.GetMapValue()
entries := make(map[ref.Val]ref.Val)

View File

@@ -15,9 +15,15 @@
package cel
import (
"strconv"
"strings"
"time"
"github.com/google/cel-go/checker"
"github.com/google/cel-go/common/overloads"
"github.com/google/cel-go/common/types"
"github.com/google/cel-go/common/types/ref"
"github.com/google/cel-go/interpreter/functions"
"github.com/google/cel-go/parser"
)
// Library provides a collection of EnvOption and ProgramOption values used to configure a CEL
@@ -65,7 +71,7 @@ type stdLibrary struct{}
func (stdLibrary) CompileOptions() []EnvOption {
return []EnvOption{
Declarations(checker.StandardDeclarations()...),
Macros(parser.AllMacros...),
Macros(StandardMacros...),
}
}
@@ -75,3 +81,263 @@ func (stdLibrary) ProgramOptions() []ProgramOption {
Functions(functions.StandardOverloads()...),
}
}
type timeUTCLibrary struct{}
func (timeUTCLibrary) CompileOptions() []EnvOption {
return timeOverloadDeclarations
}
func (timeUTCLibrary) ProgramOptions() []ProgramOption {
return []ProgramOption{}
}
// Declarations and functions which enable using UTC on time.Time inputs when the timezone is unspecified
// in the CEL expression.
var (
utcTZ = types.String("UTC")
timeOverloadDeclarations = []EnvOption{
Function(overloads.TimeGetHours,
MemberOverload(overloads.DurationToHours, []*Type{DurationType}, IntType,
UnaryBinding(func(dur ref.Val) ref.Val {
d := dur.(types.Duration)
return types.Int(d.Hours())
}))),
Function(overloads.TimeGetMinutes,
MemberOverload(overloads.DurationToMinutes, []*Type{DurationType}, IntType,
UnaryBinding(func(dur ref.Val) ref.Val {
d := dur.(types.Duration)
return types.Int(d.Minutes())
}))),
Function(overloads.TimeGetSeconds,
MemberOverload(overloads.DurationToSeconds, []*Type{DurationType}, IntType,
UnaryBinding(func(dur ref.Val) ref.Val {
d := dur.(types.Duration)
return types.Int(d.Seconds())
}))),
Function(overloads.TimeGetMilliseconds,
MemberOverload(overloads.DurationToMilliseconds, []*Type{DurationType}, IntType,
UnaryBinding(func(dur ref.Val) ref.Val {
d := dur.(types.Duration)
return types.Int(d.Milliseconds())
}))),
Function(overloads.TimeGetFullYear,
MemberOverload(overloads.TimestampToYear, []*Type{TimestampType}, IntType,
UnaryBinding(func(ts ref.Val) ref.Val {
return timestampGetFullYear(ts, utcTZ)
}),
),
MemberOverload(overloads.TimestampToYearWithTz, []*Type{TimestampType, StringType}, IntType,
BinaryBinding(timestampGetFullYear),
),
),
Function(overloads.TimeGetMonth,
MemberOverload(overloads.TimestampToMonth, []*Type{TimestampType}, IntType,
UnaryBinding(func(ts ref.Val) ref.Val {
return timestampGetMonth(ts, utcTZ)
}),
),
MemberOverload(overloads.TimestampToMonthWithTz, []*Type{TimestampType, StringType}, IntType,
BinaryBinding(timestampGetMonth),
),
),
Function(overloads.TimeGetDayOfYear,
MemberOverload(overloads.TimestampToDayOfYear, []*Type{TimestampType}, IntType,
UnaryBinding(func(ts ref.Val) ref.Val {
return timestampGetDayOfYear(ts, utcTZ)
}),
),
MemberOverload(overloads.TimestampToDayOfYearWithTz, []*Type{TimestampType, StringType}, IntType,
BinaryBinding(func(ts, tz ref.Val) ref.Val {
return timestampGetDayOfYear(ts, tz)
}),
),
),
Function(overloads.TimeGetDayOfMonth,
MemberOverload(overloads.TimestampToDayOfMonthZeroBased, []*Type{TimestampType}, IntType,
UnaryBinding(func(ts ref.Val) ref.Val {
return timestampGetDayOfMonthZeroBased(ts, utcTZ)
}),
),
MemberOverload(overloads.TimestampToDayOfMonthZeroBasedWithTz, []*Type{TimestampType, StringType}, IntType,
BinaryBinding(timestampGetDayOfMonthZeroBased),
),
),
Function(overloads.TimeGetDate,
MemberOverload(overloads.TimestampToDayOfMonthOneBased, []*Type{TimestampType}, IntType,
UnaryBinding(func(ts ref.Val) ref.Val {
return timestampGetDayOfMonthOneBased(ts, utcTZ)
}),
),
MemberOverload(overloads.TimestampToDayOfMonthOneBasedWithTz, []*Type{TimestampType, StringType}, IntType,
BinaryBinding(timestampGetDayOfMonthOneBased),
),
),
Function(overloads.TimeGetDayOfWeek,
MemberOverload(overloads.TimestampToDayOfWeek, []*Type{TimestampType}, IntType,
UnaryBinding(func(ts ref.Val) ref.Val {
return timestampGetDayOfWeek(ts, utcTZ)
}),
),
MemberOverload(overloads.TimestampToDayOfWeekWithTz, []*Type{TimestampType, StringType}, IntType,
BinaryBinding(timestampGetDayOfWeek),
),
),
Function(overloads.TimeGetHours,
MemberOverload(overloads.TimestampToHours, []*Type{TimestampType}, IntType,
UnaryBinding(func(ts ref.Val) ref.Val {
return timestampGetHours(ts, utcTZ)
}),
),
MemberOverload(overloads.TimestampToHoursWithTz, []*Type{TimestampType, StringType}, IntType,
BinaryBinding(timestampGetHours),
),
),
Function(overloads.TimeGetMinutes,
MemberOverload(overloads.TimestampToMinutes, []*Type{TimestampType}, IntType,
UnaryBinding(func(ts ref.Val) ref.Val {
return timestampGetMinutes(ts, utcTZ)
}),
),
MemberOverload(overloads.TimestampToMinutesWithTz, []*Type{TimestampType, StringType}, IntType,
BinaryBinding(timestampGetMinutes),
),
),
Function(overloads.TimeGetSeconds,
MemberOverload(overloads.TimestampToSeconds, []*Type{TimestampType}, IntType,
UnaryBinding(func(ts ref.Val) ref.Val {
return timestampGetSeconds(ts, utcTZ)
}),
),
MemberOverload(overloads.TimestampToSecondsWithTz, []*Type{TimestampType, StringType}, IntType,
BinaryBinding(timestampGetSeconds),
),
),
Function(overloads.TimeGetMilliseconds,
MemberOverload(overloads.TimestampToMilliseconds, []*Type{TimestampType}, IntType,
UnaryBinding(func(ts ref.Val) ref.Val {
return timestampGetMilliseconds(ts, utcTZ)
}),
),
MemberOverload(overloads.TimestampToMillisecondsWithTz, []*Type{TimestampType, StringType}, IntType,
BinaryBinding(timestampGetMilliseconds),
),
),
}
)
func timestampGetFullYear(ts, tz ref.Val) ref.Val {
t, err := inTimeZone(ts, tz)
if err != nil {
return types.NewErr(err.Error())
}
return types.Int(t.Year())
}
func timestampGetMonth(ts, tz ref.Val) ref.Val {
t, err := inTimeZone(ts, tz)
if err != nil {
return types.NewErr(err.Error())
}
// CEL spec indicates that the month should be 0-based, but the Time value
// for Month() is 1-based.
return types.Int(t.Month() - 1)
}
func timestampGetDayOfYear(ts, tz ref.Val) ref.Val {
t, err := inTimeZone(ts, tz)
if err != nil {
return types.NewErr(err.Error())
}
return types.Int(t.YearDay() - 1)
}
func timestampGetDayOfMonthZeroBased(ts, tz ref.Val) ref.Val {
t, err := inTimeZone(ts, tz)
if err != nil {
return types.NewErr(err.Error())
}
return types.Int(t.Day() - 1)
}
func timestampGetDayOfMonthOneBased(ts, tz ref.Val) ref.Val {
t, err := inTimeZone(ts, tz)
if err != nil {
return types.NewErr(err.Error())
}
return types.Int(t.Day())
}
func timestampGetDayOfWeek(ts, tz ref.Val) ref.Val {
t, err := inTimeZone(ts, tz)
if err != nil {
return types.NewErr(err.Error())
}
return types.Int(t.Weekday())
}
func timestampGetHours(ts, tz ref.Val) ref.Val {
t, err := inTimeZone(ts, tz)
if err != nil {
return types.NewErr(err.Error())
}
return types.Int(t.Hour())
}
func timestampGetMinutes(ts, tz ref.Val) ref.Val {
t, err := inTimeZone(ts, tz)
if err != nil {
return types.NewErr(err.Error())
}
return types.Int(t.Minute())
}
func timestampGetSeconds(ts, tz ref.Val) ref.Val {
t, err := inTimeZone(ts, tz)
if err != nil {
return types.NewErr(err.Error())
}
return types.Int(t.Second())
}
func timestampGetMilliseconds(ts, tz ref.Val) ref.Val {
t, err := inTimeZone(ts, tz)
if err != nil {
return types.NewErr(err.Error())
}
return types.Int(t.Nanosecond() / 1000000)
}
func inTimeZone(ts, tz ref.Val) (time.Time, error) {
t := ts.(types.Timestamp)
val := string(tz.(types.String))
ind := strings.Index(val, ":")
if ind == -1 {
loc, err := time.LoadLocation(val)
if err != nil {
return time.Time{}, err
}
return t.In(loc), nil
}
// If the input is not the name of a timezone (for example, 'US/Central'), it should be a numerical offset from UTC
// in the format ^(+|-)(0[0-9]|1[0-4]):[0-5][0-9]$. The numerical input is parsed in terms of hours and minutes.
hr, err := strconv.Atoi(string(val[0:ind]))
if err != nil {
return time.Time{}, err
}
min, err := strconv.Atoi(string(val[ind+1:]))
if err != nil {
return time.Time{}, err
}
var offset int
if string(val[0]) == "-" {
offset = hr*60 - min
} else {
offset = hr*60 + min
}
secondsEastOfUTC := int((time.Duration(offset) * time.Minute).Seconds())
timezone := time.FixedZone("", secondsEastOfUTC)
return t.In(timezone), nil
}
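For illustration only, and not part of the vendored change: a minimal sketch of reaching the timezone-aware accessors above through the public cel package. The cel.NewEnv, Compile, Program, Eval, and NoVars calls are assumed from this cel-go release; only getHours and the offset parsing shown in inTimeZone come from the diff itself.

package main

import (
	"fmt"

	"github.com/google/cel-go/cel"
)

func main() {
	// Standard environment; timestamp() and getHours() are CEL built-ins.
	env, err := cel.NewEnv()
	if err != nil {
		panic(err)
	}
	// The '-08:00' argument is handled by the offset branch of inTimeZone above.
	ast, iss := env.Compile(`timestamp('2009-02-13T23:31:30Z').getHours('-08:00')`)
	if iss.Err() != nil {
		panic(iss.Err())
	}
	prg, err := env.Program(ast)
	if err != nil {
		panic(err)
	}
	out, _, err := prg.Eval(cel.NoVars())
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // 15, i.e. 23:31 UTC shifted to UTC-8
}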

vendor/github.com/google/cel-go/cel/macro.go generated vendored Normal file
View File

@@ -0,0 +1,139 @@
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cel
import (
"github.com/google/cel-go/common"
"github.com/google/cel-go/parser"
exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
)
// Macro describes a function signature to match and the MacroExpander to apply.
//
// Note: when a Macro should apply to multiple overloads (based on arg count) of a given function,
// create one Macro per arg count or use a var-arg Macro.
type Macro = parser.Macro
// MacroExpander converts a call and its associated arguments into a new CEL abstract syntax tree, or an error
// if the input arguments are not suitable for the expansion requirements of the macro in question.
//
// The MacroExpander accepts as arguments a MacroExprHelper as well as the arguments used in the function call
// and produces as output an Expr ast node.
//
// Note: when the Macro.IsReceiverStyle() method returns true, the target argument will be nil.
type MacroExpander = parser.MacroExpander
// MacroExprHelper exposes helper methods for creating new expressions within a CEL abstract syntax tree.
type MacroExprHelper = parser.ExprHelper
// NewGlobalMacro creates a Macro for a global function with the specified arg count.
func NewGlobalMacro(function string, argCount int, expander MacroExpander) Macro {
return parser.NewGlobalMacro(function, argCount, expander)
}
// NewReceiverMacro creates a Macro for a receiver function matching the specified arg count.
func NewReceiverMacro(function string, argCount int, expander MacroExpander) Macro {
return parser.NewReceiverMacro(function, argCount, expander)
}
// NewGlobalVarArgMacro creates a Macro for a global function with a variable arg count.
func NewGlobalVarArgMacro(function string, expander MacroExpander) Macro {
return parser.NewGlobalVarArgMacro(function, expander)
}
// NewReceiverVarArgMacro creates a Macro for a receiver function matching a variable arg count.
func NewReceiverVarArgMacro(function string, expander MacroExpander) Macro {
return parser.NewReceiverVarArgMacro(function, expander)
}
// HasMacroExpander expands the input call arguments into a presence test, e.g. has(<operand>.field)
func HasMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) {
return parser.MakeHas(meh, target, args)
}
// ExistsMacroExpander expands the input call arguments into a comprehension that returns true if any of the
// elements in the range match the predicate expression:
// <iterRange>.exists(<iterVar>, <predicate>)
func ExistsMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) {
return parser.MakeExists(meh, target, args)
}
// ExistsOneMacroExpander expands the input call arguments into a comprehension that returns true if exactly
// one of the elements in the range matches the predicate expression:
// <iterRange>.exists_one(<iterVar>, <predicate>)
func ExistsOneMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) {
return parser.MakeExistsOne(meh, target, args)
}
// MapMacroExpander expands the input call arguments into a comprehension that transforms each element in the
// input to produce an output list.
//
// There are two call patterns supported by map:
// <iterRange>.map(<iterVar>, <transform>)
// <iterRange>.map(<iterVar>, <predicate>, <transform>)
// In the second form only iterVar values which return true when provided to the predicate expression
// are transformed.
func MapMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) {
return parser.MakeMap(meh, target, args)
}
// FilterMacroExpander expands the input call arguments into a comprehension which produces a list which contains
// only elements which match the provided predicate expression:
// <iterRange>.filter(<iterVar>, <predicate>)
func FilterMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) {
return parser.MakeFilter(meh, target, args)
}
var (
// Aliases to each macro in the CEL standard environment.
// Note: reassigning these macro variables may result in undefined behavior.
// HasMacro expands "has(m.f)" which tests the presence of a field, avoiding the need to
// specify the field as a string.
HasMacro = parser.HasMacro
// AllMacro expands "range.all(var, predicate)" into a comprehension which ensures that all
// elements in the range satisfy the predicate.
AllMacro = parser.AllMacro
// ExistsMacro expands "range.exists(var, predicate)" into a comprehension which ensures that
// some element in the range satisfies the predicate.
ExistsMacro = parser.ExistsMacro
// ExistsOneMacro expands "range.exists_one(var, predicate)", which is true if the predicate
// holds for exactly one element in the range.
ExistsOneMacro = parser.ExistsOneMacro
// MapMacro expands "range.map(var, function)" into a comprehension which applies the function
// to each element in the range to produce a new list.
MapMacro = parser.MapMacro
// MapFilterMacro expands "range.map(var, predicate, function)" into a comprehension which
// first filters the elements in the range by the predicate, then applies the transform function
// to produce a new list.
MapFilterMacro = parser.MapFilterMacro
// FilterMacro expands "range.filter(var, predicate)" into a comprehension which filters
// elements in the range, producing a new list from the elements that satisfy the predicate.
FilterMacro = parser.FilterMacro
// StandardMacros provides an alias to all the CEL macros defined in the standard environment.
StandardMacros = []Macro{
HasMacro, AllMacro, ExistsMacro, ExistsOneMacro, MapMacro, MapFilterMacro, FilterMacro,
}
// NoMacros provides an alias to an empty list of macros.
NoMacros = []Macro{}
)
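For illustration only, not part of the vendored file: a small sketch of using the exported macro aliases, here clearing the defaults and re-adding just ExistsMacro. It assumes the public cel API (NewEnv, ClearMacros, Macros, Compile, Program, Eval, NoVars) from this release.

package main

import (
	"fmt"

	"github.com/google/cel-go/cel"
)

func main() {
	// Drop the standard macro set, then opt back into the exists() macro only.
	env, err := cel.NewEnv(cel.ClearMacros(), cel.Macros(cel.ExistsMacro))
	if err != nil {
		panic(err)
	}
	ast, iss := env.Compile(`[1, 2, 3].exists(i, i > 2)`)
	if iss.Err() != nil {
		panic(iss.Err())
	}
	prg, err := env.Program(ast)
	if err != nil {
		panic(err)
	}
	out, _, err := prg.Eval(cel.NoVars())
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // true
}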

View File

@@ -29,7 +29,6 @@ import (
"github.com/google/cel-go/common/types/ref"
"github.com/google/cel-go/interpreter"
"github.com/google/cel-go/interpreter/functions"
"github.com/google/cel-go/parser"
exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
descpb "google.golang.org/protobuf/types/descriptorpb"
@@ -57,6 +56,11 @@ const (
// Enable eager validation of declarations to ensure that Env values created
// with `Extend` inherit a validated list of declarations from the parent Env.
featureEagerlyValidateDeclarations
// Enable the use of the default UTC timezone when a timezone is not specified
// on a CEL timestamp operation. This fixes the scenario where the input time
// is not already in UTC.
featureDefaultUTCTimeZone
)
// EnvOption is a functional interface for configuring the environment.
@@ -68,7 +72,7 @@ type EnvOption func(e *Env) (*Env, error)
// comprehensions such as `all` and `exists` are enabled only via macros.
func ClearMacros() EnvOption {
return func(e *Env) (*Env, error) {
e.macros = parser.NoMacros
e.macros = NoMacros
return e, nil
}
}
@@ -99,8 +103,6 @@ func CustomTypeProvider(provider ref.TypeProvider) EnvOption {
// for the environment. The NewEnv call builds on top of the standard CEL declarations. For a
// purely custom set of declarations use NewCustomEnv.
func Declarations(decls ...*exprpb.Decl) EnvOption {
// TODO: provide an alternative means of specifying declarations that doesn't refer
// to the underlying proto implementations.
return func(e *Env) (*Env, error) {
e.declarations = append(e.declarations, decls...)
return e, nil
@@ -132,7 +134,7 @@ func HomogeneousAggregateLiterals() EnvOption {
// Macros option extends the macro set configured in the environment.
//
// Note: This option must be specified after ClearMacros if used together.
func Macros(macros ...parser.Macro) EnvOption {
func Macros(macros ...Macro) EnvOption {
return func(e *Env) (*Env, error) {
e.macros = append(e.macros, macros...)
return e, nil
@@ -332,6 +334,9 @@ func CustomDecorator(dec interpreter.InterpretableDecorator) ProgramOption {
}
// Functions adds function overloads that extend or override the set of CEL built-ins.
//
// Deprecated: use Function() instead to declare the function, its overload signatures,
// and the overload implementations.
func Functions(funcs ...*functions.Overload) ProgramOption {
return func(p *prog) (*prog, error) {
if err := p.dispatcher.Add(funcs...); err != nil {
@@ -342,7 +347,8 @@ func Functions(funcs ...*functions.Overload) ProgramOption {
}
// Globals sets the global variable values for a given program. These values may be shadowed by
// variables with the same name provided to the Eval() call.
// variables with the same name provided to the Eval() call. If Globals is used in a Library with
// a Lib EnvOption, vars may shadow variables provided by previously added libraries.
//
// The vars value may either be an `interpreter.Activation` instance or a `map[string]interface{}`.
func Globals(vars interface{}) ProgramOption {
@@ -351,6 +357,9 @@ func Globals(vars interface{}) ProgramOption {
if err != nil {
return nil, err
}
if p.defaultVars != nil {
defaultVars = interpreter.NewHierarchicalActivation(p.defaultVars, defaultVars)
}
p.defaultVars = defaultVars
return p, nil
}
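A hedged sketch, not part of the diff, of the layering behavior introduced above: a later Globals option shadows names set by an earlier one while leaving other names visible. The decls helpers and cel calls are assumed from the public API; the variable names are invented for the example.

package main

import (
	"fmt"

	"github.com/google/cel-go/cel"
	"github.com/google/cel-go/checker/decls"
)

func main() {
	env, err := cel.NewEnv(cel.Declarations(
		decls.NewVar("greeting", decls.String),
		decls.NewVar("name", decls.String),
	))
	if err != nil {
		panic(err)
	}
	ast, iss := env.Compile(`greeting + ', ' + name`)
	if iss.Err() != nil {
		panic(iss.Err())
	}
	prg, err := env.Program(ast,
		cel.Globals(map[string]interface{}{"greeting": "hello", "name": "world"}),
		// Layered via NewHierarchicalActivation: "name" is shadowed, "greeting" still resolves.
		cel.Globals(map[string]interface{}{"name": "cel"}),
	)
	if err != nil {
		panic(err)
	}
	out, _, err := prg.Eval(cel.NoVars())
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // hello, cel
}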
@@ -438,7 +447,7 @@ func CostLimit(costLimit uint64) ProgramOption {
}
func fieldToCELType(field protoreflect.FieldDescriptor) (*exprpb.Type, error) {
if field.Kind() == protoreflect.MessageKind {
if field.Kind() == protoreflect.MessageKind || field.Kind() == protoreflect.GroupKind {
msgName := (string)(field.Message().FullName())
wellKnownType, found := pb.CheckedWellKnowns[msgName]
if found {
@@ -519,6 +528,12 @@ func CrossTypeNumericComparisons(enabled bool) EnvOption {
return features(featureCrossTypeNumericComparisons, enabled)
}
// DefaultUTCTimeZone ensures that time-based operations use the UTC timezone rather than the
// input time's local timezone.
func DefaultUTCTimeZone(enabled bool) EnvOption {
return features(featureDefaultUTCTimeZone, enabled)
}
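A minimal sketch, not part of the change, of opting into the flag above. Everything except DefaultUTCTimeZone is assumed from the public cel API; with the flag enabled, accessors without an explicit timezone evaluate in UTC.

package main

import (
	"fmt"

	"github.com/google/cel-go/cel"
)

func main() {
	env, err := cel.NewEnv(cel.DefaultUTCTimeZone(true))
	if err != nil {
		panic(err)
	}
	// No timezone argument: with the feature enabled this is evaluated in UTC.
	ast, iss := env.Compile(`timestamp('2009-02-13T23:31:30Z').getHours()`)
	if iss.Err() != nil {
		panic(iss.Err())
	}
	prg, err := env.Program(ast)
	if err != nil {
		panic(err)
	}
	out, _, err := prg.Eval(cel.NoVars())
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // 23; the literal is already UTC, the flag matters for non-UTC inputs
}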
// features sets the given feature flags. See list of Feature constants above.
func features(flag int, enabled bool) EnvOption {
return func(e *Env) (*Env, error) {

View File

@@ -168,6 +168,18 @@ func newProgram(e *Env, ast *Ast, opts []ProgramOption) (Program, error) {
}
}
// Add the function bindings created via Function() options.
for _, fn := range e.functions {
bindings, err := fn.bindings()
if err != nil {
return nil, err
}
err = disp.Add(bindings...)
if err != nil {
return nil, err
}
}
// Set the attribute factory after the options have been set.
var attrFactory interpreter.AttributeFactory
if p.evalOpts&OptPartialEval == OptPartialEval {

View File

@@ -54,7 +54,7 @@ go_test(
"//test:go_default_library",
"//test/proto2pb:go_default_library",
"//test/proto3pb:go_default_library",
"@com_github_antlr//runtime/Go/antlr:go_default_library",
"@com_github_antlr_antlr4_runtime_go_antlr//:go_default_library",
"@org_golang_google_protobuf//proto:go_default_library",
],
)

View File

@@ -187,7 +187,7 @@ func (c *checker) checkSelect(e *exprpb.Expr) {
// Interpret as field selection, first traversing down the operand.
c.check(sel.Operand)
targetType := c.getType(sel.Operand)
targetType := substitute(c.mappings, c.getType(sel.Operand), false)
// Assume error type by default as most types do not support field selection.
resultType := decls.Error
switch kindOf(targetType) {
@@ -224,7 +224,7 @@ func (c *checker) checkSelect(e *exprpb.Expr) {
if sel.TestOnly {
resultType = decls.Bool
}
c.setType(e, resultType)
c.setType(e, substitute(c.mappings, resultType, false))
}
func (c *checker) checkCall(e *exprpb.Expr) {
@@ -469,18 +469,18 @@ func (c *checker) checkCreateMessage(e *exprpb.Expr) {
func (c *checker) checkComprehension(e *exprpb.Expr) {
comp := e.GetComprehensionExpr()
c.check(comp.IterRange)
c.check(comp.AccuInit)
accuType := c.getType(comp.AccuInit)
rangeType := c.getType(comp.IterRange)
c.check(comp.GetIterRange())
c.check(comp.GetAccuInit())
accuType := c.getType(comp.GetAccuInit())
rangeType := substitute(c.mappings, c.getType(comp.GetIterRange()), false)
var varType *exprpb.Type
switch kindOf(rangeType) {
case kindList:
varType = rangeType.GetListType().ElemType
varType = rangeType.GetListType().GetElemType()
case kindMap:
// Ranges over the keys.
varType = rangeType.GetMapType().KeyType
varType = rangeType.GetMapType().GetKeyType()
case kindDyn, kindError, kindTypeParam:
// Set the range type to DYN to prevent assignment to a potentially incorrect type
// at a later point in type-checking. The isAssignable call will update the type
@@ -489,28 +489,28 @@ func (c *checker) checkComprehension(e *exprpb.Expr) {
// Set the range iteration variable to type DYN as well.
varType = decls.Dyn
default:
c.errors.notAComprehensionRange(c.location(comp.IterRange), rangeType)
c.errors.notAComprehensionRange(c.location(comp.GetIterRange()), rangeType)
varType = decls.Error
}
// Create a scope for the comprehension since it has a local accumulation variable.
// This scope will contain the accumulation variable used to compute the result.
c.env = c.env.enterScope()
c.env.Add(decls.NewVar(comp.AccuVar, accuType))
c.env.Add(decls.NewVar(comp.GetAccuVar(), accuType))
// Create a block scope for the loop.
c.env = c.env.enterScope()
c.env.Add(decls.NewVar(comp.IterVar, varType))
c.env.Add(decls.NewVar(comp.GetIterVar(), varType))
// Check the variable references in the condition and step.
c.check(comp.LoopCondition)
c.assertType(comp.LoopCondition, decls.Bool)
c.check(comp.LoopStep)
c.assertType(comp.LoopStep, accuType)
c.check(comp.GetLoopCondition())
c.assertType(comp.GetLoopCondition(), decls.Bool)
c.check(comp.GetLoopStep())
c.assertType(comp.GetLoopStep(), accuType)
// Exit the loop's block scope before checking the result.
c.env = c.env.exitScope()
c.check(comp.Result)
c.check(comp.GetResult())
// Exit the comprehension scope.
c.env = c.env.exitScope()
c.setType(e, c.getType(comp.Result))
c.setType(e, substitute(c.mappings, c.getType(comp.GetResult()), false))
}
// Checks compatibility of joined types, and returns the most general common type.

View File

@@ -181,8 +181,7 @@ func (e *Env) addOverload(f *exprpb.Decl, overload *exprpb.Decl_FunctionDecl_Ove
overload.GetParams()...)
overloadErased := substitute(emptyMappings, overloadFunction, true)
for _, existing := range function.GetOverloads() {
existingFunction := decls.NewFunctionType(existing.GetResultType(),
existing.GetParams()...)
existingFunction := decls.NewFunctionType(existing.GetResultType(), existing.GetParams()...)
existingErased := substitute(emptyMappings, existingFunction, true)
overlap := isAssignable(emptyMappings, overloadErased, existingErased) != nil ||
isAssignable(emptyMappings, existingErased, overloadErased) != nil
@@ -213,18 +212,33 @@ func (e *Env) addOverload(f *exprpb.Decl, overload *exprpb.Decl_FunctionDecl_Ove
// Adds a function decl if one doesn't already exist, then adds all overloads from the Decl.
// If overload overlaps with an existing overload, adds to the errors in the Env instead.
func (e *Env) setFunction(decl *exprpb.Decl) []errorMsg {
errorMsgs := make([]errorMsg, 0)
overloads := decl.GetFunction().GetOverloads()
current := e.declarations.FindFunction(decl.Name)
if current == nil {
// Add the function declaration without overloads and check the overloads below.
current = decls.NewFunction(decl.Name)
} else {
existingOverloads := map[string]*exprpb.Decl_FunctionDecl_Overload{}
for _, overload := range current.GetFunction().GetOverloads() {
existingOverloads[overload.GetOverloadId()] = overload
}
newOverloads := []*exprpb.Decl_FunctionDecl_Overload{}
for _, overload := range overloads {
existing, found := existingOverloads[overload.GetOverloadId()]
if !found || !proto.Equal(existing, overload) {
newOverloads = append(newOverloads, overload)
}
}
overloads = newOverloads
if len(newOverloads) == 0 {
return errorMsgs
}
// Copy on write since we don't know where this original definition came from.
current = proto.Clone(current).(*exprpb.Decl)
}
e.declarations.SetFunction(current)
errorMsgs := make([]errorMsg, 0)
for _, overload := range decl.GetFunction().GetOverloads() {
for _, overload := range overloads {
errorMsgs = append(errorMsgs, e.addOverload(current, overload)...)
}
return errorMsgs
@@ -235,6 +249,9 @@ func (e *Env) setFunction(decl *exprpb.Decl) []errorMsg {
func (e *Env) addIdent(decl *exprpb.Decl) errorMsg {
current := e.declarations.FindIdentInScope(decl.Name)
if current != nil {
if proto.Equal(current, decl) {
return ""
}
return overlappingIdentifierError(decl.Name)
}
e.declarations.AddIdent(decl)

View File

@@ -88,6 +88,8 @@ func FormatCheckedType(t *exprpb.Type) string {
FormatCheckedType(decls.NewPrimitiveType(t.GetWrapper())))
case kindError:
return "!error!"
case kindTypeParam:
return t.GetTypeParam()
}
return t.String()
}
@@ -238,14 +240,25 @@ func internalIsAssignable(m *mapping, t1 *exprpb.Type, t2 *exprpb.Type) bool {
// - t2 has a type substitution (t2sub) assignable to t1
// - t2 does not occur within t1.
func isValidTypeSubstitution(m *mapping, t1, t2 *exprpb.Type) (valid, hasSub bool) {
// Early return if the t1 and t2 are the same instance.
kind1, kind2 := kindOf(t1), kindOf(t2)
if kind1 == kind2 && (t1 == t2 || proto.Equal(t1, t2)) {
return true, true
}
if t2Sub, found := m.find(t2); found {
kind1, kind2 := kindOf(t1), kindOf(t2)
if kind1 == kind2 && proto.Equal(t1, t2Sub) {
// Early return if t1 and t2Sub are the same instance, as otherwise the mapping
// might mark a type as being a substitution for itself.
if kind1 == kindOf(t2Sub) && (t1 == t2Sub || proto.Equal(t1, t2Sub)) {
return true, true
}
// If the types are compatible, pick the more general type and return true
if internalIsAssignable(m, t1, t2Sub) {
m.add(t2, mostGeneral(t1, t2Sub))
t2New := mostGeneral(t1, t2Sub)
// only update the type reference map if the target type does not occur within it.
if notReferencedIn(m, t2, t2New) {
m.add(t2, t2New)
}
// acknowledge the type agreement, and that the substitution is already tracked.
return true, true
}
return false, true

View File

@@ -31,6 +31,9 @@ type Error struct {
const (
dot = "."
ind = "^"
// maxSnippetLength is the largest number of characters which can be rendered in an error message snippet.
maxSnippetLength = 16384
)
var (
@@ -45,7 +48,7 @@ func (e *Error) ToDisplayString(source Source) string {
e.Location.Line(),
e.Location.Column()+1, // add one to the 0-based column for display
e.Message)
if snippet, found := source.Snippet(e.Location.Line()); found {
if snippet, found := source.Snippet(e.Location.Line()); found && len(snippet) <= maxSnippetLength {
snippet := strings.Replace(snippet, "\t", " ", -1)
srcLine := "\n | " + snippet
var bytes = []byte(snippet)

View File

@@ -17,23 +17,32 @@ package common
import (
"fmt"
"sort"
"strings"
)
// Errors type which contains a list of errors observed during parsing.
type Errors struct {
errors []Error
source Source
errors []Error
source Source
numErrors int
maxErrorsToReport int
}
// NewErrors creates a new instance of the Errors type.
func NewErrors(source Source) *Errors {
return &Errors{
errors: []Error{},
source: source}
errors: []Error{},
source: source,
maxErrorsToReport: 100,
}
}
// ReportError records an error at a source location.
func (e *Errors) ReportError(l Location, format string, args ...interface{}) {
e.numErrors++
if e.numErrors > e.maxErrorsToReport {
return
}
err := Error{
Location: l,
Message: fmt.Sprintf(format, args...),
@@ -46,18 +55,28 @@ func (e *Errors) GetErrors() []Error {
return e.errors[:]
}
// Append takes an Errors object as input creates a new Errors object with the current and input
// errors.
// Append creates a new Errors object with the current and input errors.
func (e *Errors) Append(errs []Error) *Errors {
return &Errors{
errors: append(e.errors, errs...),
source: e.source,
errors: append(e.errors, errs...),
source: e.source,
numErrors: e.numErrors + len(errs),
maxErrorsToReport: e.maxErrorsToReport,
}
}
// ToDisplayString returns the error set to a newline delimited string.
func (e *Errors) ToDisplayString() string {
var result = ""
errorsInString := e.maxErrorsToReport
if e.numErrors > e.maxErrorsToReport {
// add one more error to indicate the number of errors truncated.
errorsInString++
} else {
// otherwise the error set will just contain the number of errors.
errorsInString = e.numErrors
}
result := make([]string, errorsInString)
sort.SliceStable(e.errors, func(i, j int) bool {
ei := e.errors[i].Location
ej := e.errors[j].Location
@@ -65,10 +84,14 @@ func (e *Errors) ToDisplayString() string {
(ei.Line() == ej.Line() && ei.Column() < ej.Column())
})
for i, err := range e.errors {
if i >= 1 {
result += "\n"
// This can happen when two Errors objects have been appended together.
if i >= e.maxErrorsToReport {
break
}
result += err.ToDisplayString(e.source)
result[i] = err.ToDisplayString(e.source)
}
return result
if e.numErrors > e.maxErrorsToReport {
result[e.maxErrorsToReport] = fmt.Sprintf("%d more errors were truncated", e.numErrors-e.maxErrorsToReport)
}
return strings.Join(result, "\n")
}

View File

@@ -141,3 +141,13 @@ func Precedence(symbol string) int {
}
return op.precedence
}
// Arity returns the number of arguments the operator takes.
// -1 is returned if an undefined symbol is provided.
func Arity(symbol string) int {
op, found := operatorMap[symbol]
if !found {
return -1
}
return op.arity
}

View File

@@ -131,9 +131,11 @@ func (b Bool) Value() interface{} {
// IsBool returns whether the input ref.Val or ref.Type is equal to BoolType.
func IsBool(elem ref.Val) bool {
switch elem.(type) {
switch v := elem.(type) {
case Bool:
return true
case ref.Val:
return v.Type() == BoolType
default:
return false
}

View File

@@ -72,16 +72,13 @@ func MaybeNoSuchOverloadErr(val ref.Val) ref.Val {
return ValOrErr(val, "no such overload")
}
// ValOrErr either returns the existing error or create a new one.
// ValOrErr either returns the existing error or creates a new one.
// TODO: Audit the use of this function and standardize the error messages and codes.
func ValOrErr(val ref.Val, format string, args ...interface{}) ref.Val {
if val == nil {
if val == nil || !IsUnknownOrError(val) {
return NewErr(format, args...)
}
if IsUnknownOrError(val) {
return val
}
return NewErr(format, args...)
return val
}
// wrapErr wraps an existing Go error value into a CEL Err value.

View File

@@ -53,24 +53,24 @@ func NewObject(adapter ref.TypeAdapter,
}
func (o *protoObj) ConvertToNative(typeDesc reflect.Type) (interface{}, error) {
pb := o.value
if reflect.TypeOf(pb).AssignableTo(typeDesc) {
return pb, nil
srcPB := o.value
if reflect.TypeOf(srcPB).AssignableTo(typeDesc) {
return srcPB, nil
}
if reflect.TypeOf(o).AssignableTo(typeDesc) {
return o, nil
}
switch typeDesc {
case anyValueType:
_, isAny := pb.(*anypb.Any)
_, isAny := srcPB.(*anypb.Any)
if isAny {
return pb, nil
return srcPB, nil
}
return anypb.New(pb)
return anypb.New(srcPB)
case jsonValueType:
// Marshal the proto to JSON first, and then rehydrate as protobuf.Value as there is no
// support for direct conversion from proto.Message to protobuf.Value.
bytes, err := protojson.Marshal(pb)
bytes, err := protojson.Marshal(srcPB)
if err != nil {
return nil, err
}
@@ -88,7 +88,10 @@ func (o *protoObj) ConvertToNative(typeDesc reflect.Type) (interface{}, error) {
val := reflect.New(typeDesc.Elem()).Interface()
dstPB, ok := val.(proto.Message)
if ok {
proto.Merge(dstPB, pb)
err := pb.Merge(dstPB, srcPB)
if err != nil {
return nil, fmt.Errorf("type conversion error: %v", err)
}
return dstPB, nil
}
}

View File

@@ -21,7 +21,8 @@ import (
"google.golang.org/protobuf/encoding/protowire"
"google.golang.org/protobuf/proto"
"google.golang.org/protobuf/reflect/protoreflect"
"google.golang.org/protobuf/types/known/anypb"
anypb "google.golang.org/protobuf/types/known/anypb"
)
// Equal returns whether two proto.Message instances are equal using the following criteria:

View File

@@ -17,6 +17,8 @@
package pb
import (
"fmt"
"google.golang.org/protobuf/proto"
"google.golang.org/protobuf/reflect/protoreflect"
"google.golang.org/protobuf/reflect/protoregistry"
@@ -48,6 +50,31 @@ var (
}
)
// Merge will copy the source proto message into the destination, or error if the merge cannot be completed.
//
// Unlike proto.Merge, this method will fall back to proto.Marshal/Unmarshal if the two proto messages do not
// share the same instance of their type descriptor.
func Merge(dstPB, srcPB proto.Message) error {
src, dst := srcPB.ProtoReflect(), dstPB.ProtoReflect()
if src.Descriptor() == dst.Descriptor() {
proto.Merge(dstPB, srcPB)
return nil
}
if src.Descriptor().FullName() != dst.Descriptor().FullName() {
return fmt.Errorf("pb.Merge() arguments must be the same type. got: %v, %v",
dst.Descriptor().FullName(), src.Descriptor().FullName())
}
bytes, err := proto.Marshal(srcPB)
if err != nil {
return fmt.Errorf("pb.Merge(dstPB, srcPB) failed to marshal source proto: %v", err)
}
err = proto.Unmarshal(bytes, dstPB)
if err != nil {
return fmt.Errorf("pb.Merge(dstPB, srcPB) failed to unmarshal to dest proto: %v", err)
}
return nil
}
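A hedged sketch, separate from the vendored helper, of the byte-level fallback Merge relies on: marshal the source message and unmarshal into the destination. The dynamicpb destination here shares its descriptor with the source, so this only illustrates the mechanics; the interesting case in Merge is when the descriptors are distinct instances.

package main

import (
	"fmt"
	"time"

	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/types/dynamicpb"
	tpb "google.golang.org/protobuf/types/known/timestamppb"
)

func main() {
	src := tpb.New(time.Unix(1700000000, 0))
	dst := dynamicpb.NewMessage(src.ProtoReflect().Descriptor())

	// The fallback path: serialize the source, then deserialize into the destination.
	bytes, err := proto.Marshal(src)
	if err != nil {
		panic(err)
	}
	if err := proto.Unmarshal(bytes, dst); err != nil {
		panic(err)
	}
	seconds := dst.Get(dst.Descriptor().Fields().ByName("seconds")).Int()
	fmt.Println(seconds) // 1700000000
}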
// NewDb creates a new `pb.Db` with an empty type name to file description map.
func NewDb() *Db {
pbdb := &Db{

View File

@@ -87,7 +87,7 @@ func (td *TypeDescription) FieldByName(name string) (*FieldDescription, bool) {
// MaybeUnwrap accepts a proto message as input and unwraps it to a primitive CEL type if possible.
//
// This method returns the unwrapped value and 'true', else the original value and 'false'.
func (td *TypeDescription) MaybeUnwrap(msg proto.Message) (interface{}, bool) {
func (td *TypeDescription) MaybeUnwrap(msg proto.Message) (interface{}, bool, error) {
return unwrap(td, msg)
}
@@ -118,7 +118,7 @@ func NewFieldDescription(fieldDesc protoreflect.FieldDescriptor) *FieldDescripti
switch fieldDesc.Kind() {
case protoreflect.EnumKind:
reflectType = reflectTypeOf(protoreflect.EnumNumber(0))
case protoreflect.MessageKind:
case protoreflect.GroupKind, protoreflect.MessageKind:
zeroMsg = dynamicpb.NewMessage(fieldDesc.Message())
reflectType = reflectTypeOf(zeroMsg)
default:
@@ -248,8 +248,8 @@ func (fd *FieldDescription) GetFrom(target interface{}) (interface{}, error) {
return &Map{Map: fv, KeyType: fd.KeyType, ValueType: fd.ValueType}, nil
case protoreflect.Message:
// Make sure to unwrap well-known protobuf types before returning.
unwrapped, _ := fd.MaybeUnwrapDynamic(fv)
return unwrapped, nil
unwrapped, _, err := fd.MaybeUnwrapDynamic(fv)
return unwrapped, err
default:
return fv, nil
}
@@ -267,7 +267,8 @@ func (fd *FieldDescription) IsMap() bool {
// IsMessage returns true if the field is of message type.
func (fd *FieldDescription) IsMessage() bool {
return fd.desc.Kind() == protoreflect.MessageKind
kind := fd.desc.Kind()
return kind == protoreflect.MessageKind || kind == protoreflect.GroupKind
}
// IsOneof returns true if the field is declared within a oneof block.
@@ -288,7 +289,7 @@ func (fd *FieldDescription) IsList() bool {
//
// This function returns the unwrapped value and 'true' on success, or the original value
// and 'false' otherwise.
func (fd *FieldDescription) MaybeUnwrapDynamic(msg protoreflect.Message) (interface{}, bool) {
func (fd *FieldDescription) MaybeUnwrapDynamic(msg protoreflect.Message) (interface{}, bool, error) {
return unwrapDynamic(fd, msg)
}
@@ -316,7 +317,7 @@ func (fd *FieldDescription) Zero() proto.Message {
}
func (fd *FieldDescription) typeDefToType() *exprpb.Type {
if fd.desc.Kind() == protoreflect.MessageKind {
if fd.desc.Kind() == protoreflect.MessageKind || fd.desc.Kind() == protoreflect.GroupKind {
msgType := string(fd.desc.Message().FullName())
if wk, found := CheckedWellKnowns[msgType]; found {
return wk
@@ -361,63 +362,63 @@ func checkedWrap(t *exprpb.Type) *exprpb.Type {
// input message is a *dynamicpb.Message which obscures the typing information from Go.
//
// Returns the unwrapped value and 'true' if unwrapped, otherwise the input value and 'false'.
func unwrap(desc description, msg proto.Message) (interface{}, bool) {
func unwrap(desc description, msg proto.Message) (interface{}, bool, error) {
switch v := msg.(type) {
case *anypb.Any:
dynMsg, err := v.UnmarshalNew()
if err != nil {
return v, false
return v, false, err
}
return unwrapDynamic(desc, dynMsg.ProtoReflect())
case *dynamicpb.Message:
return unwrapDynamic(desc, v)
case *dpb.Duration:
return v.AsDuration(), true
return v.AsDuration(), true, nil
case *tpb.Timestamp:
return v.AsTime(), true
return v.AsTime(), true, nil
case *structpb.Value:
switch v.GetKind().(type) {
case *structpb.Value_BoolValue:
return v.GetBoolValue(), true
return v.GetBoolValue(), true, nil
case *structpb.Value_ListValue:
return v.GetListValue(), true
return v.GetListValue(), true, nil
case *structpb.Value_NullValue:
return structpb.NullValue_NULL_VALUE, true
return structpb.NullValue_NULL_VALUE, true, nil
case *structpb.Value_NumberValue:
return v.GetNumberValue(), true
return v.GetNumberValue(), true, nil
case *structpb.Value_StringValue:
return v.GetStringValue(), true
return v.GetStringValue(), true, nil
case *structpb.Value_StructValue:
return v.GetStructValue(), true
return v.GetStructValue(), true, nil
default:
return structpb.NullValue_NULL_VALUE, true
return structpb.NullValue_NULL_VALUE, true, nil
}
case *wrapperspb.BoolValue:
return v.GetValue(), true
return v.GetValue(), true, nil
case *wrapperspb.BytesValue:
return v.GetValue(), true
return v.GetValue(), true, nil
case *wrapperspb.DoubleValue:
return v.GetValue(), true
return v.GetValue(), true, nil
case *wrapperspb.FloatValue:
return float64(v.GetValue()), true
return float64(v.GetValue()), true, nil
case *wrapperspb.Int32Value:
return int64(v.GetValue()), true
return int64(v.GetValue()), true, nil
case *wrapperspb.Int64Value:
return v.GetValue(), true
return v.GetValue(), true, nil
case *wrapperspb.StringValue:
return v.GetValue(), true
return v.GetValue(), true, nil
case *wrapperspb.UInt32Value:
return uint64(v.GetValue()), true
return uint64(v.GetValue()), true, nil
case *wrapperspb.UInt64Value:
return v.GetValue(), true
return v.GetValue(), true, nil
}
return msg, false
return msg, false, nil
}
// unwrapDynamic unwraps a reflected protobuf Message value.
//
// Returns the unwrapped value and 'true' if unwrapped, otherwise the input value and 'false'.
func unwrapDynamic(desc description, refMsg protoreflect.Message) (interface{}, bool) {
func unwrapDynamic(desc description, refMsg protoreflect.Message) (interface{}, bool, error) {
msg := refMsg.Interface()
if !refMsg.IsValid() {
msg = desc.Zero()
@@ -432,18 +433,22 @@ func unwrapDynamic(desc description, refMsg protoreflect.Message) (interface{},
// unwrapped before being returned to the caller. Otherwise, the dynamic protobuf object
// represented by the Any will be returned.
unwrappedAny := &anypb.Any{}
proto.Merge(unwrappedAny, msg)
err := Merge(unwrappedAny, msg)
if err != nil {
return nil, false, err
}
dynMsg, err := unwrappedAny.UnmarshalNew()
if err != nil {
// Allow the error to move further up the stack as it should result in a type
// conversion error if the caller does not recover it somehow.
return unwrappedAny, true
return nil, false, err
}
// Attempt to unwrap the dynamic type, otherwise return the dynamic message.
if unwrapped, nested := unwrapDynamic(desc, dynMsg.ProtoReflect()); nested {
return unwrapped, true
unwrapped, nested, err := unwrapDynamic(desc, dynMsg.ProtoReflect())
if err == nil && nested {
return unwrapped, true, nil
}
return dynMsg, true
return dynMsg, true, err
case "google.protobuf.BoolValue",
"google.protobuf.BytesValue",
"google.protobuf.DoubleValue",
@@ -456,34 +461,49 @@ func unwrapDynamic(desc description, refMsg protoreflect.Message) (interface{},
// The msg value is ignored when dealing with wrapper types as they have a null or value
// behavior, rather than the standard zero value behavior of other proto message types.
if !refMsg.IsValid() {
return structpb.NullValue_NULL_VALUE, true
return structpb.NullValue_NULL_VALUE, true, nil
}
valueField := refMsg.Descriptor().Fields().ByName("value")
return refMsg.Get(valueField).Interface(), true
return refMsg.Get(valueField).Interface(), true, nil
case "google.protobuf.Duration":
unwrapped := &dpb.Duration{}
proto.Merge(unwrapped, msg)
return unwrapped.AsDuration(), true
err := Merge(unwrapped, msg)
if err != nil {
return nil, false, err
}
return unwrapped.AsDuration(), true, nil
case "google.protobuf.ListValue":
unwrapped := &structpb.ListValue{}
proto.Merge(unwrapped, msg)
return unwrapped, true
err := Merge(unwrapped, msg)
if err != nil {
return nil, false, err
}
return unwrapped, true, nil
case "google.protobuf.NullValue":
return structpb.NullValue_NULL_VALUE, true
return structpb.NullValue_NULL_VALUE, true, nil
case "google.protobuf.Struct":
unwrapped := &structpb.Struct{}
proto.Merge(unwrapped, msg)
return unwrapped, true
err := Merge(unwrapped, msg)
if err != nil {
return nil, false, err
}
return unwrapped, true, nil
case "google.protobuf.Timestamp":
unwrapped := &tpb.Timestamp{}
proto.Merge(unwrapped, msg)
return unwrapped.AsTime(), true
err := Merge(unwrapped, msg)
if err != nil {
return nil, false, err
}
return unwrapped.AsTime(), true, nil
case "google.protobuf.Value":
unwrapped := &structpb.Value{}
proto.Merge(unwrapped, msg)
err := Merge(unwrapped, msg)
if err != nil {
return nil, false, err
}
return unwrap(desc, unwrapped)
}
return msg, false
return msg, false, nil
}
// reflectTypeOf intercepts the reflect.Type call to ensure that dynamicpb.Message types preserve

View File

@@ -206,7 +206,10 @@ func (p *protoTypeRegistry) NativeToValue(value interface{}) ref.Val {
if !found {
return NewErr("unknown type: '%s'", typeName)
}
unwrapped, isUnwrapped := td.MaybeUnwrap(v)
unwrapped, isUnwrapped, err := td.MaybeUnwrap(v)
if err != nil {
return UnsupportedRefValConversionErr(v)
}
if isUnwrapped {
return p.NativeToValue(unwrapped)
}
@@ -394,7 +397,10 @@ func nativeToValue(a ref.TypeAdapter, value interface{}) (ref.Val, bool) {
if !found {
return nil, false
}
val, unwrapped := td.MaybeUnwrap(v)
val, unwrapped, err := td.MaybeUnwrap(v)
if err != nil {
return UnsupportedRefValConversionErr(v), true
}
if !unwrapped {
return nil, false
}

View File

@@ -12,8 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
// Package ref contains the reference interfaces used throughout the types
// components.
// Package ref contains the reference interfaces used throughout the types components.
package ref
import (
@@ -29,31 +28,27 @@ type Type interface {
// TypeName returns the qualified type name of the type.
//
// The type name is also used as the type's identifier name at type-check
// and interpretation time.
// The type name is also used as the type's identifier name at type-check and interpretation time.
TypeName() string
}
// Val interface defines the functions supported by all expression values.
// Val implementations may specialize the behavior of the value through the
// addition of traits.
// Val implementations may specialize the behavior of the value through the addition of traits.
type Val interface {
// ConvertToNative converts the Value to a native Go struct according to the
// reflected type description, or error if the conversion is not feasible.
ConvertToNative(typeDesc reflect.Type) (interface{}, error)
// ConvertToType supports type conversions between value types supported by
// the expression language.
// ConvertToType supports type conversions between value types supported by the expression language.
ConvertToType(typeValue Type) Val
// Equal returns true if the `other` value has the same type and content as
// the implementing struct.
// Equal returns true if the `other` value has the same type and content as the implementing struct.
Equal(other Val) Val
// Type returns the TypeValue of the value.
Type() Type
// Value returns the raw value of the instance which may not be directly
// compatible with the expression language types.
// Value returns the raw value of the instance which may not be directly compatible with the expression
// language types.
Value() interface{}
}

View File

@@ -299,7 +299,7 @@ func timeZone(tz ref.Val, visitor timestampVisitor) timestampVisitor {
if err != nil {
return wrapErr(err)
}
min, err := strconv.Atoi(string(val[ind+1]))
min, err := strconv.Atoi(string(val[ind+1:]))
if err != nil {
return wrapErr(err)
}

View File

@@ -15,12 +15,8 @@ go_library(
visibility = ["//visibility:public"],
deps = [
"//cel:go_default_library",
"//checker/decls:go_default_library",
"//common/types:go_default_library",
"//common/types/ref:go_default_library",
"//common/types/traits:go_default_library",
"//interpreter/functions:go_default_library",
"@org_golang_google_genproto//googleapis/api/expr/v1alpha1:go_default_library",
],
)

View File

@@ -16,12 +16,11 @@ package ext
import (
"encoding/base64"
"reflect"
"github.com/google/cel-go/cel"
"github.com/google/cel-go/checker/decls"
"github.com/google/cel-go/interpreter/functions"
exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
"github.com/google/cel-go/common/types"
"github.com/google/cel-go/common/types/ref"
)
// Encoders returns a cel.EnvOption to configure extended functions for string, byte, and object
@@ -57,42 +56,23 @@ type encoderLib struct{}
func (encoderLib) CompileOptions() []cel.EnvOption {
return []cel.EnvOption{
cel.Declarations(
decls.NewFunction("base64.decode",
decls.NewOverload("base64_decode_string",
[]*exprpb.Type{decls.String},
decls.Bytes)),
decls.NewFunction("base64.encode",
decls.NewOverload("base64_encode_bytes",
[]*exprpb.Type{decls.Bytes},
decls.String)),
),
cel.Function("base64.decode",
cel.Overload("base64_decode_string", []*cel.Type{cel.StringType}, cel.BytesType,
cel.UnaryBinding(func(str ref.Val) ref.Val {
s := str.(types.String)
return bytesOrError(base64DecodeString(string(s)))
}))),
cel.Function("base64.encode",
cel.Overload("base64_encode_bytes", []*cel.Type{cel.BytesType}, cel.StringType,
cel.UnaryBinding(func(bytes ref.Val) ref.Val {
b := bytes.(types.Bytes)
return stringOrError(base64EncodeBytes([]byte(b)))
}))),
}
}
func (encoderLib) ProgramOptions() []cel.ProgramOption {
wrappedBase64EncodeBytes := callInBytesOutString(base64EncodeBytes)
wrappedBase64DecodeString := callInStrOutBytes(base64DecodeString)
return []cel.ProgramOption{
cel.Functions(
&functions.Overload{
Operator: "base64.decode",
Unary: wrappedBase64DecodeString,
},
&functions.Overload{
Operator: "base64_decode_string",
Unary: wrappedBase64DecodeString,
},
&functions.Overload{
Operator: "base64.encode",
Unary: wrappedBase64EncodeBytes,
},
&functions.Overload{
Operator: "base64_encode_bytes",
Unary: wrappedBase64EncodeBytes,
},
),
}
return []cel.ProgramOption{}
}
func base64DecodeString(str string) ([]byte, error) {
@@ -102,3 +82,7 @@ func base64DecodeString(str string) ([]byte, error) {
func base64EncodeBytes(bytes []byte) (string, error) {
return base64.StdEncoding.EncodeToString(bytes), nil
}
var (
bytesListType = reflect.TypeOf([]byte{})
)
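A usage sketch, not part of the diff, of the rewritten encoders library; ext.Encoders and the cel calls are assumed from the public packages of this release.

package main

import (
	"fmt"

	"github.com/google/cel-go/cel"
	"github.com/google/cel-go/ext"
)

func main() {
	env, err := cel.NewEnv(ext.Encoders())
	if err != nil {
		panic(err)
	}
	ast, iss := env.Compile(`base64.encode(b'hello')`)
	if iss.Err() != nil {
		panic(iss.Err())
	}
	prg, err := env.Program(ast)
	if err != nil {
		panic(err)
	}
	out, _, err := prg.Eval(cel.NoVars())
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // aGVsbG8=
}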

View File

@@ -17,287 +17,34 @@ package ext
import (
"github.com/google/cel-go/common/types"
"github.com/google/cel-go/common/types/ref"
"github.com/google/cel-go/common/types/traits"
"github.com/google/cel-go/interpreter/functions"
)
// function invocation guards for common call signatures within extension functions.
func callInBytesOutString(fn func([]byte) (string, error)) functions.UnaryOp {
return func(val ref.Val) ref.Val {
vVal, ok := val.(types.Bytes)
if !ok {
return types.MaybeNoSuchOverloadErr(val)
}
str, err := fn([]byte(vVal))
if err != nil {
return types.NewErr(err.Error())
}
return types.String(str)
func intOrError(i int64, err error) ref.Val {
if err != nil {
return types.NewErr(err.Error())
}
return types.Int(i)
}
func callInStrOutBytes(fn func(string) ([]byte, error)) functions.UnaryOp {
return func(val ref.Val) ref.Val {
vVal, ok := val.(types.String)
if !ok {
return types.MaybeNoSuchOverloadErr(val)
}
byt, err := fn(string(vVal))
if err != nil {
return types.NewErr(err.Error())
}
return types.Bytes(byt)
func bytesOrError(bytes []byte, err error) ref.Val {
if err != nil {
return types.NewErr(err.Error())
}
return types.Bytes(bytes)
}
func callInStrOutStr(fn func(string) (string, error)) functions.UnaryOp {
return func(val ref.Val) ref.Val {
vVal, ok := val.(types.String)
if !ok {
return types.MaybeNoSuchOverloadErr(val)
}
str, err := fn(string(vVal))
if err != nil {
return types.NewErr(err.Error())
}
return types.String(str)
func stringOrError(str string, err error) ref.Val {
if err != nil {
return types.NewErr(err.Error())
}
return types.String(str)
}
func callInStrIntOutStr(fn func(string, int64) (string, error)) functions.BinaryOp {
return func(val, arg ref.Val) ref.Val {
vVal, ok := val.(types.String)
if !ok {
return types.MaybeNoSuchOverloadErr(val)
}
argVal, ok := arg.(types.Int)
if !ok {
return types.MaybeNoSuchOverloadErr(arg)
}
out, err := fn(string(vVal), int64(argVal))
if err != nil {
return types.NewErr(err.Error())
}
return types.String(out)
}
}
func callInStrStrOutInt(fn func(string, string) (int64, error)) functions.BinaryOp {
return func(val, arg ref.Val) ref.Val {
vVal, ok := val.(types.String)
if !ok {
return types.MaybeNoSuchOverloadErr(val)
}
argVal, ok := arg.(types.String)
if !ok {
return types.MaybeNoSuchOverloadErr(arg)
}
out, err := fn(string(vVal), string(argVal))
if err != nil {
return types.NewErr(err.Error())
}
return types.Int(out)
}
}
func callInStrStrOutListStr(fn func(string, string) ([]string, error)) functions.BinaryOp {
return func(val, arg ref.Val) ref.Val {
vVal, ok := val.(types.String)
if !ok {
return types.MaybeNoSuchOverloadErr(val)
}
argVal, ok := arg.(types.String)
if !ok {
return types.MaybeNoSuchOverloadErr(arg)
}
out, err := fn(string(vVal), string(argVal))
if err != nil {
return types.NewErr(err.Error())
}
return types.DefaultTypeAdapter.NativeToValue(out)
}
}
func callInStrIntIntOutStr(fn func(string, int64, int64) (string, error)) functions.FunctionOp {
return func(args ...ref.Val) ref.Val {
if len(args) != 3 {
return types.NoSuchOverloadErr()
}
vVal, ok := args[0].(types.String)
if !ok {
return types.MaybeNoSuchOverloadErr(args[0])
}
arg1Val, ok := args[1].(types.Int)
if !ok {
return types.MaybeNoSuchOverloadErr(args[1])
}
arg2Val, ok := args[2].(types.Int)
if !ok {
return types.MaybeNoSuchOverloadErr(args[2])
}
out, err := fn(string(vVal), int64(arg1Val), int64(arg2Val))
if err != nil {
return types.NewErr(err.Error())
}
return types.String(out)
}
}
func callInStrStrStrOutStr(fn func(string, string, string) (string, error)) functions.FunctionOp {
return func(args ...ref.Val) ref.Val {
if len(args) != 3 {
return types.NoSuchOverloadErr()
}
vVal, ok := args[0].(types.String)
if !ok {
return types.MaybeNoSuchOverloadErr(args[0])
}
arg1Val, ok := args[1].(types.String)
if !ok {
return types.MaybeNoSuchOverloadErr(args[1])
}
arg2Val, ok := args[2].(types.String)
if !ok {
return types.MaybeNoSuchOverloadErr(args[2])
}
out, err := fn(string(vVal), string(arg1Val), string(arg2Val))
if err != nil {
return types.NewErr(err.Error())
}
return types.String(out)
}
}
func callInStrStrIntOutInt(fn func(string, string, int64) (int64, error)) functions.FunctionOp {
return func(args ...ref.Val) ref.Val {
if len(args) != 3 {
return types.NoSuchOverloadErr()
}
vVal, ok := args[0].(types.String)
if !ok {
return types.MaybeNoSuchOverloadErr(args[0])
}
arg1Val, ok := args[1].(types.String)
if !ok {
return types.MaybeNoSuchOverloadErr(args[1])
}
arg2Val, ok := args[2].(types.Int)
if !ok {
return types.MaybeNoSuchOverloadErr(args[2])
}
out, err := fn(string(vVal), string(arg1Val), int64(arg2Val))
if err != nil {
return types.NewErr(err.Error())
}
return types.Int(out)
}
}
func callInStrStrIntOutListStr(fn func(string, string, int64) ([]string, error)) functions.FunctionOp {
return func(args ...ref.Val) ref.Val {
if len(args) != 3 {
return types.NoSuchOverloadErr()
}
vVal, ok := args[0].(types.String)
if !ok {
return types.MaybeNoSuchOverloadErr(args[0])
}
arg1Val, ok := args[1].(types.String)
if !ok {
return types.MaybeNoSuchOverloadErr(args[1])
}
arg2Val, ok := args[2].(types.Int)
if !ok {
return types.MaybeNoSuchOverloadErr(args[2])
}
out, err := fn(string(vVal), string(arg1Val), int64(arg2Val))
if err != nil {
return types.NewErr(err.Error())
}
return types.DefaultTypeAdapter.NativeToValue(out)
}
}
func callInStrStrStrIntOutStr(fn func(string, string, string, int64) (string, error)) functions.FunctionOp {
return func(args ...ref.Val) ref.Val {
if len(args) != 4 {
return types.NoSuchOverloadErr()
}
vVal, ok := args[0].(types.String)
if !ok {
return types.MaybeNoSuchOverloadErr(args[0])
}
arg1Val, ok := args[1].(types.String)
if !ok {
return types.MaybeNoSuchOverloadErr(args[1])
}
arg2Val, ok := args[2].(types.String)
if !ok {
return types.MaybeNoSuchOverloadErr(args[2])
}
arg3Val, ok := args[3].(types.Int)
if !ok {
return types.MaybeNoSuchOverloadErr(args[3])
}
out, err := fn(string(vVal), string(arg1Val), string(arg2Val), int64(arg3Val))
if err != nil {
return types.NewErr(err.Error())
}
return types.String(out)
}
}
func callInListStrOutStr(fn func([]string) (string, error)) functions.UnaryOp {
return func(args1 ref.Val) ref.Val {
vVal, ok := args1.(traits.Lister)
if !ok {
return types.MaybeNoSuchOverloadErr(args1)
}
strings := make([]string, vVal.Size().Value().(int64))
i := 0
for it := vVal.Iterator(); it.HasNext() == types.True; {
next := it.Next()
v, ok := next.(types.String)
if !ok {
return types.MaybeNoSuchOverloadErr(next)
}
strings[i] = string(v)
i++
}
out, err := fn(strings)
if err != nil {
return types.NewErr(err.Error())
}
return types.DefaultTypeAdapter.NativeToValue(out)
}
}
func callInListStrStrOutStr(fn func([]string, string) (string, error)) functions.BinaryOp {
return func(args1, args2 ref.Val) ref.Val {
vVal, ok := args1.(traits.Lister)
if !ok {
return types.MaybeNoSuchOverloadErr(args1)
}
arg1Val, ok := args2.(types.String)
if !ok {
return types.MaybeNoSuchOverloadErr(args2)
}
strings := make([]string, vVal.Size().Value().(int64))
i := 0
for it := vVal.Iterator(); it.HasNext() == types.True; {
next := it.Next()
v, ok := next.(types.String)
if !ok {
return types.MaybeNoSuchOverloadErr(next)
}
strings[i] = string(v)
i++
}
out, err := fn(strings, string(arg1Val))
if err != nil {
return types.NewErr(err.Error())
}
return types.DefaultTypeAdapter.NativeToValue(out)
func listStringOrError(strs []string, err error) ref.Val {
if err != nil {
return types.NewErr(err.Error())
}
return types.DefaultTypeAdapter.NativeToValue(strs)
}

View File

@@ -19,16 +19,13 @@ package ext
import (
"fmt"
"reflect"
"strings"
"unicode"
"github.com/google/cel-go/cel"
"github.com/google/cel-go/checker/decls"
"github.com/google/cel-go/common/types"
"github.com/google/cel-go/common/types/ref"
"github.com/google/cel-go/interpreter/functions"
exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
)
// Strings returns a cel.EnvOption to configure extended functions for string manipulation.
@@ -207,194 +204,128 @@ type stringLib struct{}
func (stringLib) CompileOptions() []cel.EnvOption {
return []cel.EnvOption{
cel.Declarations(
decls.NewFunction("charAt",
decls.NewInstanceOverload("string_char_at_int",
[]*exprpb.Type{decls.String, decls.Int},
decls.String)),
decls.NewFunction("indexOf",
decls.NewInstanceOverload("string_index_of_string",
[]*exprpb.Type{decls.String, decls.String},
decls.Int),
decls.NewInstanceOverload("string_index_of_string_int",
[]*exprpb.Type{decls.String, decls.String, decls.Int},
decls.Int)),
decls.NewFunction("lastIndexOf",
decls.NewInstanceOverload("string_last_index_of_string",
[]*exprpb.Type{decls.String, decls.String},
decls.Int),
decls.NewInstanceOverload("string_last_index_of_string_int",
[]*exprpb.Type{decls.String, decls.String, decls.Int},
decls.Int)),
decls.NewFunction("lowerAscii",
decls.NewInstanceOverload("string_lower_ascii",
[]*exprpb.Type{decls.String},
decls.String)),
decls.NewFunction("replace",
decls.NewInstanceOverload("string_replace_string_string",
[]*exprpb.Type{decls.String, decls.String, decls.String},
decls.String),
decls.NewInstanceOverload("string_replace_string_string_int",
[]*exprpb.Type{decls.String, decls.String, decls.String, decls.Int},
decls.String)),
decls.NewFunction("split",
decls.NewInstanceOverload("string_split_string",
[]*exprpb.Type{decls.String, decls.String},
decls.NewListType(decls.String)),
decls.NewInstanceOverload("string_split_string_int",
[]*exprpb.Type{decls.String, decls.String, decls.Int},
decls.NewListType(decls.String))),
decls.NewFunction("substring",
decls.NewInstanceOverload("string_substring_int",
[]*exprpb.Type{decls.String, decls.Int},
decls.String),
decls.NewInstanceOverload("string_substring_int_int",
[]*exprpb.Type{decls.String, decls.Int, decls.Int},
decls.String)),
decls.NewFunction("trim",
decls.NewInstanceOverload("string_trim",
[]*exprpb.Type{decls.String},
decls.String)),
decls.NewFunction("upperAscii",
decls.NewInstanceOverload("string_upper_ascii",
[]*exprpb.Type{decls.String},
decls.String)),
decls.NewFunction("join",
decls.NewInstanceOverload("list_join",
[]*exprpb.Type{decls.NewListType(decls.String)},
decls.String),
decls.NewInstanceOverload("list_join_string",
[]*exprpb.Type{decls.NewListType(decls.String), decls.String},
decls.String),
),
),
cel.Function("charAt",
cel.MemberOverload("string_char_at_int", []*cel.Type{cel.StringType, cel.IntType}, cel.StringType,
cel.BinaryBinding(func(str, ind ref.Val) ref.Val {
s := str.(types.String)
i := ind.(types.Int)
return stringOrError(charAt(string(s), int64(i)))
}))),
cel.Function("indexOf",
cel.MemberOverload("string_index_of_string", []*cel.Type{cel.StringType, cel.StringType}, cel.IntType,
cel.BinaryBinding(func(str, substr ref.Val) ref.Val {
s := str.(types.String)
sub := substr.(types.String)
return intOrError(indexOf(string(s), string(sub)))
})),
cel.MemberOverload("string_index_of_string_int", []*cel.Type{cel.StringType, cel.StringType, cel.IntType}, cel.IntType,
cel.FunctionBinding(func(args ...ref.Val) ref.Val {
s := args[0].(types.String)
sub := args[1].(types.String)
offset := args[2].(types.Int)
return intOrError(indexOfOffset(string(s), string(sub), int64(offset)))
}))),
cel.Function("lastIndexOf",
cel.MemberOverload("string_last_index_of_string", []*cel.Type{cel.StringType, cel.StringType}, cel.IntType,
cel.BinaryBinding(func(str, substr ref.Val) ref.Val {
s := str.(types.String)
sub := substr.(types.String)
return intOrError(lastIndexOf(string(s), string(sub)))
})),
cel.MemberOverload("string_last_index_of_string_int", []*cel.Type{cel.StringType, cel.StringType, cel.IntType}, cel.IntType,
cel.FunctionBinding(func(args ...ref.Val) ref.Val {
s := args[0].(types.String)
sub := args[1].(types.String)
offset := args[2].(types.Int)
return intOrError(lastIndexOfOffset(string(s), string(sub), int64(offset)))
}))),
cel.Function("lowerAscii",
cel.MemberOverload("string_lower_ascii", []*cel.Type{cel.StringType}, cel.StringType,
cel.UnaryBinding(func(str ref.Val) ref.Val {
s := str.(types.String)
return stringOrError(lowerASCII(string(s)))
}))),
cel.Function("replace",
cel.MemberOverload(
"string_replace_string_string", []*cel.Type{cel.StringType, cel.StringType, cel.StringType}, cel.StringType,
cel.FunctionBinding(func(args ...ref.Val) ref.Val {
str := args[0].(types.String)
old := args[1].(types.String)
new := args[2].(types.String)
return stringOrError(replace(string(str), string(old), string(new)))
})),
cel.MemberOverload(
"string_replace_string_string_int", []*cel.Type{cel.StringType, cel.StringType, cel.StringType, cel.IntType}, cel.StringType,
cel.FunctionBinding(func(args ...ref.Val) ref.Val {
str := args[0].(types.String)
old := args[1].(types.String)
new := args[2].(types.String)
n := args[3].(types.Int)
return stringOrError(replaceN(string(str), string(old), string(new), int64(n)))
}))),
cel.Function("split",
cel.MemberOverload("string_split_string", []*cel.Type{cel.StringType, cel.StringType}, cel.ListType(cel.StringType),
cel.BinaryBinding(func(str, separator ref.Val) ref.Val {
s := str.(types.String)
sep := separator.(types.String)
return listStringOrError(split(string(s), string(sep)))
})),
cel.MemberOverload("string_split_string_int", []*cel.Type{cel.StringType, cel.StringType, cel.IntType}, cel.ListType(cel.StringType),
cel.FunctionBinding(func(args ...ref.Val) ref.Val {
s := args[0].(types.String)
sep := args[1].(types.String)
n := args[2].(types.Int)
return listStringOrError(splitN(string(s), string(sep), int64(n)))
}))),
cel.Function("substring",
cel.MemberOverload("string_substring_int", []*cel.Type{cel.StringType, cel.IntType}, cel.StringType,
cel.BinaryBinding(func(str, offset ref.Val) ref.Val {
s := str.(types.String)
off := offset.(types.Int)
return stringOrError(substr(string(s), int64(off)))
})),
cel.MemberOverload("string_substring_int_int", []*cel.Type{cel.StringType, cel.IntType, cel.IntType}, cel.StringType,
cel.FunctionBinding(func(args ...ref.Val) ref.Val {
s := args[0].(types.String)
start := args[1].(types.Int)
end := args[2].(types.Int)
return stringOrError(substrRange(string(s), int64(start), int64(end)))
}))),
cel.Function("trim",
cel.MemberOverload("string_trim", []*cel.Type{cel.StringType}, cel.StringType,
cel.UnaryBinding(func(str ref.Val) ref.Val {
s := str.(types.String)
return stringOrError(trimSpace(string(s)))
}))),
cel.Function("upperAscii",
cel.MemberOverload("string_upper_ascii", []*cel.Type{cel.StringType}, cel.StringType,
cel.UnaryBinding(func(str ref.Val) ref.Val {
s := str.(types.String)
return stringOrError(upperASCII(string(s)))
}))),
cel.Function("join",
cel.MemberOverload("list_join", []*cel.Type{cel.ListType(cel.StringType)}, cel.StringType,
cel.UnaryBinding(func(list ref.Val) ref.Val {
l, err := list.ConvertToNative(stringListType)
if err != nil {
return types.NewErr(err.Error())
}
return stringOrError(join(l.([]string)))
})),
cel.MemberOverload("list_join_string", []*cel.Type{cel.ListType(cel.StringType), cel.StringType}, cel.StringType,
cel.BinaryBinding(func(list, delim ref.Val) ref.Val {
l, err := list.ConvertToNative(stringListType)
if err != nil {
return types.NewErr(err.Error())
}
d := delim.(types.String)
return stringOrError(joinSeparator(l.([]string), string(d)))
}))),
}
}
func (stringLib) ProgramOptions() []cel.ProgramOption {
wrappedReplace := callInStrStrStrOutStr(replace)
wrappedReplaceN := callInStrStrStrIntOutStr(replaceN)
return []cel.ProgramOption{
cel.Functions(
&functions.Overload{
Operator: "charAt",
Binary: callInStrIntOutStr(charAt),
},
&functions.Overload{
Operator: "string_char_at_int",
Binary: callInStrIntOutStr(charAt),
},
&functions.Overload{
Operator: "indexOf",
Binary: callInStrStrOutInt(indexOf),
Function: callInStrStrIntOutInt(indexOfOffset),
},
&functions.Overload{
Operator: "string_index_of_string",
Binary: callInStrStrOutInt(indexOf),
},
&functions.Overload{
Operator: "string_index_of_string_int",
Function: callInStrStrIntOutInt(indexOfOffset),
},
&functions.Overload{
Operator: "lastIndexOf",
Binary: callInStrStrOutInt(lastIndexOf),
Function: callInStrStrIntOutInt(lastIndexOfOffset),
},
&functions.Overload{
Operator: "string_last_index_of_string",
Binary: callInStrStrOutInt(lastIndexOf),
},
&functions.Overload{
Operator: "string_last_index_of_string_int",
Function: callInStrStrIntOutInt(lastIndexOfOffset),
},
&functions.Overload{
Operator: "lowerAscii",
Unary: callInStrOutStr(lowerASCII),
},
&functions.Overload{
Operator: "string_lower_ascii",
Unary: callInStrOutStr(lowerASCII),
},
&functions.Overload{
Operator: "replace",
Function: func(values ...ref.Val) ref.Val {
if len(values) == 3 {
return wrappedReplace(values...)
}
if len(values) == 4 {
return wrappedReplaceN(values...)
}
return types.NoSuchOverloadErr()
},
},
&functions.Overload{
Operator: "string_replace_string_string",
Function: wrappedReplace,
},
&functions.Overload{
Operator: "string_replace_string_string_int",
Function: wrappedReplaceN,
},
&functions.Overload{
Operator: "split",
Binary: callInStrStrOutListStr(split),
Function: callInStrStrIntOutListStr(splitN),
},
&functions.Overload{
Operator: "string_split_string",
Binary: callInStrStrOutListStr(split),
},
&functions.Overload{
Operator: "string_split_string_int",
Function: callInStrStrIntOutListStr(splitN),
},
&functions.Overload{
Operator: "substring",
Binary: callInStrIntOutStr(substr),
Function: callInStrIntIntOutStr(substrRange),
},
&functions.Overload{
Operator: "string_substring_int",
Binary: callInStrIntOutStr(substr),
},
&functions.Overload{
Operator: "string_substring_int_int",
Function: callInStrIntIntOutStr(substrRange),
},
&functions.Overload{
Operator: "trim",
Unary: callInStrOutStr(trimSpace),
},
&functions.Overload{
Operator: "string_trim",
Unary: callInStrOutStr(trimSpace),
},
&functions.Overload{
Operator: "upperAscii",
Unary: callInStrOutStr(upperASCII),
},
&functions.Overload{
Operator: "string_upper_ascii",
Unary: callInStrOutStr(upperASCII),
},
&functions.Overload{
Operator: "join",
Unary: callInListStrOutStr(join),
Binary: callInListStrStrOutStr(joinSeparator),
},
&functions.Overload{
Operator: "list_join",
Unary: callInListStrOutStr(join),
},
&functions.Overload{
Operator: "list_join_string",
Binary: callInListStrStrOutStr(joinSeparator),
},
),
}
return []cel.ProgramOption{}
}
func charAt(str string, ind int64) (string, error) {
@@ -546,3 +477,7 @@ func joinSeparator(strs []string, separator string) (string, error) {
func join(strs []string) (string, error) {
return strings.Join(strs, ""), nil
}
var (
stringListType = reflect.TypeOf([]string{})
)

View File

@@ -45,6 +45,10 @@ type Overload struct {
// Function defines the overload with a FunctionOp implementation. May be
// nil.
Function FunctionOp
// NonStrict specifies whether the Overload will tolerate arguments that
// are types.Err or types.Unknown.
NonStrict bool
}
// UnaryOp is a function that takes a single value and produces an output.
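The NonStrict flag added above is what the interpreter changes further down key off of: a non-strict overload receives types.Err and types.Unknown arguments instead of having the call short-circuit on them. A minimal sketch of declaring such an overload follows, assuming imports of the cel-go common/types, common/types/ref, and interpreter/functions packages; the "coalesce" operator and its behavior are hypothetical and not part of this change.

// coalesceOverload returns the first argument that is neither unknown nor an error.
// NonStrict lets the FunctionOp observe those values rather than having the
// interpreter return them early.
var coalesceOverload = &functions.Overload{
    Operator:  "coalesce",
    NonStrict: true,
    Function: func(args ...ref.Val) ref.Val {
        for _, arg := range args {
            if !types.IsUnknownOrError(arg) {
                return arg
            }
        }
        return types.NewErr("coalesce: no non-error, non-unknown argument")
    },
}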

View File

@@ -421,12 +421,13 @@ func (zero *evalZeroArity) Args() []Interpretable {
}
type evalUnary struct {
id int64
function string
overload string
arg Interpretable
trait int
impl functions.UnaryOp
id int64
function string
overload string
arg Interpretable
trait int
impl functions.UnaryOp
nonStrict bool
}
// ID implements the Interpretable interface method.
@@ -438,12 +439,13 @@ func (un *evalUnary) ID() int64 {
func (un *evalUnary) Eval(ctx Activation) ref.Val {
argVal := un.arg.Eval(ctx)
// Early return if the argument to the function is unknown or error.
if types.IsUnknownOrError(argVal) {
strict := !un.nonStrict
if strict && types.IsUnknownOrError(argVal) {
return argVal
}
// If the implementation is bound and the argument value has the right traits required to
// invoke it, then call the implementation.
if un.impl != nil && (un.trait == 0 || argVal.Type().HasTrait(un.trait)) {
if un.impl != nil && (un.trait == 0 || (!strict && types.IsUnknownOrError(argVal)) || argVal.Type().HasTrait(un.trait)) {
return un.impl(argVal)
}
// Otherwise, if the argument is a ReceiverType attempt to invoke the receiver method on the
@@ -478,13 +480,14 @@ func (un *evalUnary) Args() []Interpretable {
}
type evalBinary struct {
id int64
function string
overload string
lhs Interpretable
rhs Interpretable
trait int
impl functions.BinaryOp
id int64
function string
overload string
lhs Interpretable
rhs Interpretable
trait int
impl functions.BinaryOp
nonStrict bool
}
// ID implements the Interpretable interface method.
@@ -497,15 +500,18 @@ func (bin *evalBinary) Eval(ctx Activation) ref.Val {
lVal := bin.lhs.Eval(ctx)
rVal := bin.rhs.Eval(ctx)
// Early return if any argument to the function is unknown or error.
if types.IsUnknownOrError(lVal) {
return lVal
}
if types.IsUnknownOrError(rVal) {
return rVal
strict := !bin.nonStrict
if strict {
if types.IsUnknownOrError(lVal) {
return lVal
}
if types.IsUnknownOrError(rVal) {
return rVal
}
}
// If the implementation is bound and the argument value has the right traits required to
// invoke it, then call the implementation.
if bin.impl != nil && (bin.trait == 0 || lVal.Type().HasTrait(bin.trait)) {
if bin.impl != nil && (bin.trait == 0 || (!strict && types.IsUnknownOrError(lVal)) || lVal.Type().HasTrait(bin.trait)) {
return bin.impl(lVal, rVal)
}
// Otherwise, if the argument is a ReceiverType attempt to invoke the receiver method on the
@@ -537,12 +543,13 @@ func (bin *evalBinary) Args() []Interpretable {
}
type evalVarArgs struct {
id int64
function string
overload string
args []Interpretable
trait int
impl functions.FunctionOp
id int64
function string
overload string
args []Interpretable
trait int
impl functions.FunctionOp
nonStrict bool
}
// NewCall creates a new call Interpretable.
@@ -565,16 +572,17 @@ func (fn *evalVarArgs) ID() int64 {
func (fn *evalVarArgs) Eval(ctx Activation) ref.Val {
argVals := make([]ref.Val, len(fn.args))
// Early return if any argument to the function is unknown or error.
strict := !fn.nonStrict
for i, arg := range fn.args {
argVals[i] = arg.Eval(ctx)
if types.IsUnknownOrError(argVals[i]) {
if strict && types.IsUnknownOrError(argVals[i]) {
return argVals[i]
}
}
// If the implementation is bound and the argument value has the right traits required to
// invoke it, then call the implementation.
arg0 := argVals[0]
if fn.impl != nil && (fn.trait == 0 || arg0.Type().HasTrait(fn.trait)) {
if fn.impl != nil && (fn.trait == 0 || (!strict && types.IsUnknownOrError(arg0)) || arg0.Type().HasTrait(fn.trait)) {
return fn.impl(argVals...)
}
// Otherwise, if the argument is a ReceiverType attempt to invoke the receiver method on the

View File

@@ -302,8 +302,18 @@ func (p *planner) planCall(expr *exprpb.Expr) (Interpretable, error) {
case 0:
return p.planCallZero(expr, fnName, oName, fnDef)
case 1:
// If the FunctionOp has been used, then use it as it may exist for the purposes
// of dynamic dispatch within a singleton function implementation.
if fnDef != nil && fnDef.Unary == nil && fnDef.Function != nil {
return p.planCallVarArgs(expr, fnName, oName, fnDef, args)
}
return p.planCallUnary(expr, fnName, oName, fnDef, args)
case 2:
// If the FunctionOp has been used, then use it as it may exist for the purposes
// of dynamic dispatch within a singleton function implementation.
if fnDef != nil && fnDef.Binary == nil && fnDef.Function != nil {
return p.planCallVarArgs(expr, fnName, oName, fnDef, args)
}
return p.planCallBinary(expr, fnName, oName, fnDef, args)
default:
return p.planCallVarArgs(expr, fnName, oName, fnDef, args)
@@ -334,20 +344,23 @@ func (p *planner) planCallUnary(expr *exprpb.Expr,
args []Interpretable) (Interpretable, error) {
var fn functions.UnaryOp
var trait int
var nonStrict bool
if impl != nil {
if impl.Unary == nil {
return nil, fmt.Errorf("no such overload: %s(arg)", function)
}
fn = impl.Unary
trait = impl.OperandTrait
nonStrict = impl.NonStrict
}
return &evalUnary{
id: expr.Id,
function: function,
overload: overload,
arg: args[0],
trait: trait,
impl: fn,
id: expr.Id,
function: function,
overload: overload,
arg: args[0],
trait: trait,
impl: fn,
nonStrict: nonStrict,
}, nil
}
@@ -359,21 +372,24 @@ func (p *planner) planCallBinary(expr *exprpb.Expr,
args []Interpretable) (Interpretable, error) {
var fn functions.BinaryOp
var trait int
var nonStrict bool
if impl != nil {
if impl.Binary == nil {
return nil, fmt.Errorf("no such overload: %s(lhs, rhs)", function)
}
fn = impl.Binary
trait = impl.OperandTrait
nonStrict = impl.NonStrict
}
return &evalBinary{
id: expr.Id,
function: function,
overload: overload,
lhs: args[0],
rhs: args[1],
trait: trait,
impl: fn,
id: expr.Id,
function: function,
overload: overload,
lhs: args[0],
rhs: args[1],
trait: trait,
impl: fn,
nonStrict: nonStrict,
}, nil
}
@@ -385,20 +401,23 @@ func (p *planner) planCallVarArgs(expr *exprpb.Expr,
args []Interpretable) (Interpretable, error) {
var fn functions.FunctionOp
var trait int
var nonStrict bool
if impl != nil {
if impl.Function == nil {
return nil, fmt.Errorf("no such overload: %s(...)", function)
}
fn = impl.Function
trait = impl.OperandTrait
nonStrict = impl.NonStrict
}
return &evalVarArgs{
id: expr.Id,
function: function,
overload: overload,
args: args,
trait: trait,
impl: fn,
id: expr.Id,
function: function,
overload: overload,
args: args,
trait: trait,
impl: fn,
nonStrict: nonStrict,
}, nil
}
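The planner comments above describe the "singleton function implementation" pattern: a single Overload whose FunctionOp dispatches on argument count, much like the "replace" overload shown in the strings library hunk earlier in this diff. A hedged sketch of that pattern, assuming fmt plus the common/types, common/types/ref, and interpreter/functions imports; the "describe" operator is hypothetical.

// describeOverload handles both describe(x) and describe(x, y) through one FunctionOp,
// so the planner routes unary and binary call sites to the same implementation.
var describeOverload = &functions.Overload{
    Operator: "describe",
    Function: func(args ...ref.Val) ref.Val {
        switch len(args) {
        case 1:
            return types.String(fmt.Sprintf("%v", args[0].Value()))
        case 2:
            return types.String(fmt.Sprintf("%v: %v", args[0].Value(), args[1].Value()))
        default:
            return types.NoSuchOverloadErr()
        }
    },
}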

View File

@@ -23,7 +23,7 @@ go_library(
"//common/operators:go_default_library",
"//common/runes:go_default_library",
"//parser/gen:go_default_library",
"@com_github_antlr//runtime/Go/antlr:go_default_library",
"@com_github_antlr_antlr4_runtime_go_antlr//:go_default_library",
"@org_golang_google_genproto//googleapis/api/expr/v1alpha1:go_default_library",
"@org_golang_google_protobuf//proto:go_default_library",
"@org_golang_google_protobuf//types/known/structpb:go_default_library",
@@ -45,7 +45,7 @@ go_test(
"//common/debug:go_default_library",
"//parser/gen:go_default_library",
"//test:go_default_library",
"@com_github_antlr//runtime/Go/antlr:go_default_library",
"@com_github_antlr_antlr4_runtime_go_antlr//:go_default_library",
"@org_golang_google_protobuf//proto:go_default_library",
],
)

View File

@@ -21,6 +21,6 @@ go_library(
],
importpath = "github.com/google/cel-go/parser/gen",
deps = [
"@com_github_antlr//runtime/Go/antlr:go_default_library",
"@com_github_antlr_antlr4_runtime_go_antlr//:go_default_library",
],
)

96 vendor/github.com/google/cel-go/parser/gen/CEL.interp generated vendored Normal file

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,4 +1,4 @@
// Code generated from /Users/tswadell/go/src/github.com/google/cel-go/bin/../parser/gen/CEL.g4 by ANTLR 4.9.1. DO NOT EDIT.
// Code generated from /Users/tswadell/go/src/github.com/google/cel-go/parser/gen/CEL.g4 by ANTLR 4.10.1. DO NOT EDIT.
package gen // CEL
import "github.com/antlr/antlr4/runtime/Go/antlr"

View File

@@ -1,4 +1,4 @@
// Code generated from /Users/tswadell/go/src/github.com/google/cel-go/bin/../parser/gen/CEL.g4 by ANTLR 4.9.1. DO NOT EDIT.
// Code generated from /Users/tswadell/go/src/github.com/google/cel-go/parser/gen/CEL.g4 by ANTLR 4.10.1. DO NOT EDIT.
package gen // CEL
import "github.com/antlr/antlr4/runtime/Go/antlr"

View File

@@ -1,9 +1,10 @@
// Code generated from /Users/tswadell/go/src/github.com/google/cel-go/bin/../parser/gen/CEL.g4 by ANTLR 4.9.1. DO NOT EDIT.
// Code generated from /Users/tswadell/go/src/github.com/google/cel-go/parser/gen/CEL.g4 by ANTLR 4.10.1. DO NOT EDIT.
package gen
import (
"fmt"
"sync"
"unicode"
"github.com/antlr/antlr4/runtime/Go/antlr"
@@ -11,243 +12,9 @@ import (
// Suppress unused import error
var _ = fmt.Printf
var _ = sync.Once{}
var _ = unicode.IsLetter
var serializedLexerAtn = []uint16{
3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 2, 38, 425,
8, 1, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7,
9, 7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12,
4, 13, 9, 13, 4, 14, 9, 14, 4, 15, 9, 15, 4, 16, 9, 16, 4, 17, 9, 17, 4,
18, 9, 18, 4, 19, 9, 19, 4, 20, 9, 20, 4, 21, 9, 21, 4, 22, 9, 22, 4, 23,
9, 23, 4, 24, 9, 24, 4, 25, 9, 25, 4, 26, 9, 26, 4, 27, 9, 27, 4, 28, 9,
28, 4, 29, 9, 29, 4, 30, 9, 30, 4, 31, 9, 31, 4, 32, 9, 32, 4, 33, 9, 33,
4, 34, 9, 34, 4, 35, 9, 35, 4, 36, 9, 36, 4, 37, 9, 37, 4, 38, 9, 38, 4,
39, 9, 39, 4, 40, 9, 40, 4, 41, 9, 41, 4, 42, 9, 42, 4, 43, 9, 43, 4, 44,
9, 44, 4, 45, 9, 45, 4, 46, 9, 46, 4, 47, 9, 47, 4, 48, 9, 48, 3, 2, 3,
2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 4, 3, 4, 3, 4, 3, 5, 3, 5, 3, 6, 3, 6, 3,
6, 3, 7, 3, 7, 3, 7, 3, 8, 3, 8, 3, 9, 3, 9, 3, 9, 3, 10, 3, 10, 3, 10,
3, 11, 3, 11, 3, 12, 3, 12, 3, 13, 3, 13, 3, 14, 3, 14, 3, 15, 3, 15, 3,
16, 3, 16, 3, 17, 3, 17, 3, 18, 3, 18, 3, 19, 3, 19, 3, 20, 3, 20, 3, 21,
3, 21, 3, 22, 3, 22, 3, 23, 3, 23, 3, 24, 3, 24, 3, 25, 3, 25, 3, 26, 3,
26, 3, 27, 3, 27, 3, 27, 3, 27, 3, 27, 3, 28, 3, 28, 3, 28, 3, 28, 3, 28,
3, 28, 3, 29, 3, 29, 3, 29, 3, 29, 3, 29, 3, 30, 3, 30, 3, 31, 3, 31, 3,
32, 3, 32, 3, 33, 3, 33, 5, 33, 179, 10, 33, 3, 33, 6, 33, 182, 10, 33,
13, 33, 14, 33, 183, 3, 34, 3, 34, 3, 35, 3, 35, 3, 36, 3, 36, 3, 36, 3,
36, 5, 36, 194, 10, 36, 3, 37, 3, 37, 3, 37, 3, 38, 3, 38, 3, 38, 3, 38,
3, 38, 3, 39, 3, 39, 3, 39, 3, 39, 3, 39, 3, 40, 3, 40, 3, 40, 3, 40, 3,
40, 3, 40, 3, 40, 3, 40, 3, 40, 3, 40, 3, 40, 3, 40, 3, 40, 3, 40, 3, 40,
3, 40, 3, 40, 3, 40, 5, 40, 227, 10, 40, 3, 41, 6, 41, 230, 10, 41, 13,
41, 14, 41, 231, 3, 41, 3, 41, 3, 42, 3, 42, 3, 42, 3, 42, 7, 42, 240,
10, 42, 12, 42, 14, 42, 243, 11, 42, 3, 42, 3, 42, 3, 43, 6, 43, 248, 10,
43, 13, 43, 14, 43, 249, 3, 43, 3, 43, 6, 43, 254, 10, 43, 13, 43, 14,
43, 255, 3, 43, 5, 43, 259, 10, 43, 3, 43, 6, 43, 262, 10, 43, 13, 43,
14, 43, 263, 3, 43, 3, 43, 3, 43, 3, 43, 6, 43, 270, 10, 43, 13, 43, 14,
43, 271, 3, 43, 5, 43, 275, 10, 43, 5, 43, 277, 10, 43, 3, 44, 6, 44, 280,
10, 44, 13, 44, 14, 44, 281, 3, 44, 3, 44, 3, 44, 3, 44, 6, 44, 288, 10,
44, 13, 44, 14, 44, 289, 5, 44, 292, 10, 44, 3, 45, 6, 45, 295, 10, 45,
13, 45, 14, 45, 296, 3, 45, 3, 45, 3, 45, 3, 45, 3, 45, 3, 45, 6, 45, 305,
10, 45, 13, 45, 14, 45, 306, 3, 45, 3, 45, 5, 45, 311, 10, 45, 3, 46, 3,
46, 3, 46, 7, 46, 316, 10, 46, 12, 46, 14, 46, 319, 11, 46, 3, 46, 3, 46,
3, 46, 3, 46, 7, 46, 325, 10, 46, 12, 46, 14, 46, 328, 11, 46, 3, 46, 3,
46, 3, 46, 3, 46, 3, 46, 3, 46, 3, 46, 7, 46, 337, 10, 46, 12, 46, 14,
46, 340, 11, 46, 3, 46, 3, 46, 3, 46, 3, 46, 3, 46, 3, 46, 3, 46, 3, 46,
3, 46, 7, 46, 351, 10, 46, 12, 46, 14, 46, 354, 11, 46, 3, 46, 3, 46, 3,
46, 3, 46, 3, 46, 3, 46, 7, 46, 362, 10, 46, 12, 46, 14, 46, 365, 11, 46,
3, 46, 3, 46, 3, 46, 3, 46, 3, 46, 7, 46, 372, 10, 46, 12, 46, 14, 46,
375, 11, 46, 3, 46, 3, 46, 3, 46, 3, 46, 3, 46, 3, 46, 3, 46, 3, 46, 7,
46, 385, 10, 46, 12, 46, 14, 46, 388, 11, 46, 3, 46, 3, 46, 3, 46, 3, 46,
3, 46, 3, 46, 3, 46, 3, 46, 3, 46, 3, 46, 7, 46, 400, 10, 46, 12, 46, 14,
46, 403, 11, 46, 3, 46, 3, 46, 3, 46, 3, 46, 5, 46, 409, 10, 46, 3, 47,
3, 47, 3, 47, 3, 48, 3, 48, 5, 48, 416, 10, 48, 3, 48, 3, 48, 3, 48, 7,
48, 421, 10, 48, 12, 48, 14, 48, 424, 11, 48, 6, 338, 352, 386, 401, 2,
49, 3, 3, 5, 4, 7, 5, 9, 6, 11, 7, 13, 8, 15, 9, 17, 10, 19, 11, 21, 12,
23, 13, 25, 14, 27, 15, 29, 16, 31, 17, 33, 18, 35, 19, 37, 20, 39, 21,
41, 22, 43, 23, 45, 24, 47, 25, 49, 26, 51, 27, 53, 28, 55, 29, 57, 30,
59, 2, 61, 2, 63, 2, 65, 2, 67, 2, 69, 2, 71, 2, 73, 2, 75, 2, 77, 2, 79,
2, 81, 31, 83, 32, 85, 33, 87, 34, 89, 35, 91, 36, 93, 37, 95, 38, 3, 2,
18, 4, 2, 67, 92, 99, 124, 4, 2, 71, 71, 103, 103, 4, 2, 45, 45, 47, 47,
5, 2, 50, 59, 67, 72, 99, 104, 4, 2, 84, 84, 116, 116, 12, 2, 36, 36, 41,
41, 65, 65, 94, 94, 98, 100, 104, 104, 112, 112, 116, 116, 118, 118, 120,
120, 4, 2, 90, 90, 122, 122, 5, 2, 11, 12, 14, 15, 34, 34, 3, 2, 12, 12,
4, 2, 87, 87, 119, 119, 6, 2, 12, 12, 15, 15, 36, 36, 94, 94, 6, 2, 12,
12, 15, 15, 41, 41, 94, 94, 3, 2, 94, 94, 5, 2, 12, 12, 15, 15, 36, 36,
5, 2, 12, 12, 15, 15, 41, 41, 4, 2, 68, 68, 100, 100, 2, 458, 2, 3, 3,
2, 2, 2, 2, 5, 3, 2, 2, 2, 2, 7, 3, 2, 2, 2, 2, 9, 3, 2, 2, 2, 2, 11, 3,
2, 2, 2, 2, 13, 3, 2, 2, 2, 2, 15, 3, 2, 2, 2, 2, 17, 3, 2, 2, 2, 2, 19,
3, 2, 2, 2, 2, 21, 3, 2, 2, 2, 2, 23, 3, 2, 2, 2, 2, 25, 3, 2, 2, 2, 2,
27, 3, 2, 2, 2, 2, 29, 3, 2, 2, 2, 2, 31, 3, 2, 2, 2, 2, 33, 3, 2, 2, 2,
2, 35, 3, 2, 2, 2, 2, 37, 3, 2, 2, 2, 2, 39, 3, 2, 2, 2, 2, 41, 3, 2, 2,
2, 2, 43, 3, 2, 2, 2, 2, 45, 3, 2, 2, 2, 2, 47, 3, 2, 2, 2, 2, 49, 3, 2,
2, 2, 2, 51, 3, 2, 2, 2, 2, 53, 3, 2, 2, 2, 2, 55, 3, 2, 2, 2, 2, 57, 3,
2, 2, 2, 2, 81, 3, 2, 2, 2, 2, 83, 3, 2, 2, 2, 2, 85, 3, 2, 2, 2, 2, 87,
3, 2, 2, 2, 2, 89, 3, 2, 2, 2, 2, 91, 3, 2, 2, 2, 2, 93, 3, 2, 2, 2, 2,
95, 3, 2, 2, 2, 3, 97, 3, 2, 2, 2, 5, 100, 3, 2, 2, 2, 7, 103, 3, 2, 2,
2, 9, 106, 3, 2, 2, 2, 11, 108, 3, 2, 2, 2, 13, 111, 3, 2, 2, 2, 15, 114,
3, 2, 2, 2, 17, 116, 3, 2, 2, 2, 19, 119, 3, 2, 2, 2, 21, 122, 3, 2, 2,
2, 23, 124, 3, 2, 2, 2, 25, 126, 3, 2, 2, 2, 27, 128, 3, 2, 2, 2, 29, 130,
3, 2, 2, 2, 31, 132, 3, 2, 2, 2, 33, 134, 3, 2, 2, 2, 35, 136, 3, 2, 2,
2, 37, 138, 3, 2, 2, 2, 39, 140, 3, 2, 2, 2, 41, 142, 3, 2, 2, 2, 43, 144,
3, 2, 2, 2, 45, 146, 3, 2, 2, 2, 47, 148, 3, 2, 2, 2, 49, 150, 3, 2, 2,
2, 51, 152, 3, 2, 2, 2, 53, 154, 3, 2, 2, 2, 55, 159, 3, 2, 2, 2, 57, 165,
3, 2, 2, 2, 59, 170, 3, 2, 2, 2, 61, 172, 3, 2, 2, 2, 63, 174, 3, 2, 2,
2, 65, 176, 3, 2, 2, 2, 67, 185, 3, 2, 2, 2, 69, 187, 3, 2, 2, 2, 71, 193,
3, 2, 2, 2, 73, 195, 3, 2, 2, 2, 75, 198, 3, 2, 2, 2, 77, 203, 3, 2, 2,
2, 79, 226, 3, 2, 2, 2, 81, 229, 3, 2, 2, 2, 83, 235, 3, 2, 2, 2, 85, 276,
3, 2, 2, 2, 87, 291, 3, 2, 2, 2, 89, 310, 3, 2, 2, 2, 91, 408, 3, 2, 2,
2, 93, 410, 3, 2, 2, 2, 95, 415, 3, 2, 2, 2, 97, 98, 7, 63, 2, 2, 98, 99,
7, 63, 2, 2, 99, 4, 3, 2, 2, 2, 100, 101, 7, 35, 2, 2, 101, 102, 7, 63,
2, 2, 102, 6, 3, 2, 2, 2, 103, 104, 7, 107, 2, 2, 104, 105, 7, 112, 2,
2, 105, 8, 3, 2, 2, 2, 106, 107, 7, 62, 2, 2, 107, 10, 3, 2, 2, 2, 108,
109, 7, 62, 2, 2, 109, 110, 7, 63, 2, 2, 110, 12, 3, 2, 2, 2, 111, 112,
7, 64, 2, 2, 112, 113, 7, 63, 2, 2, 113, 14, 3, 2, 2, 2, 114, 115, 7, 64,
2, 2, 115, 16, 3, 2, 2, 2, 116, 117, 7, 40, 2, 2, 117, 118, 7, 40, 2, 2,
118, 18, 3, 2, 2, 2, 119, 120, 7, 126, 2, 2, 120, 121, 7, 126, 2, 2, 121,
20, 3, 2, 2, 2, 122, 123, 7, 93, 2, 2, 123, 22, 3, 2, 2, 2, 124, 125, 7,
95, 2, 2, 125, 24, 3, 2, 2, 2, 126, 127, 7, 125, 2, 2, 127, 26, 3, 2, 2,
2, 128, 129, 7, 127, 2, 2, 129, 28, 3, 2, 2, 2, 130, 131, 7, 42, 2, 2,
131, 30, 3, 2, 2, 2, 132, 133, 7, 43, 2, 2, 133, 32, 3, 2, 2, 2, 134, 135,
7, 48, 2, 2, 135, 34, 3, 2, 2, 2, 136, 137, 7, 46, 2, 2, 137, 36, 3, 2,
2, 2, 138, 139, 7, 47, 2, 2, 139, 38, 3, 2, 2, 2, 140, 141, 7, 35, 2, 2,
141, 40, 3, 2, 2, 2, 142, 143, 7, 65, 2, 2, 143, 42, 3, 2, 2, 2, 144, 145,
7, 60, 2, 2, 145, 44, 3, 2, 2, 2, 146, 147, 7, 45, 2, 2, 147, 46, 3, 2,
2, 2, 148, 149, 7, 44, 2, 2, 149, 48, 3, 2, 2, 2, 150, 151, 7, 49, 2, 2,
151, 50, 3, 2, 2, 2, 152, 153, 7, 39, 2, 2, 153, 52, 3, 2, 2, 2, 154, 155,
7, 118, 2, 2, 155, 156, 7, 116, 2, 2, 156, 157, 7, 119, 2, 2, 157, 158,
7, 103, 2, 2, 158, 54, 3, 2, 2, 2, 159, 160, 7, 104, 2, 2, 160, 161, 7,
99, 2, 2, 161, 162, 7, 110, 2, 2, 162, 163, 7, 117, 2, 2, 163, 164, 7,
103, 2, 2, 164, 56, 3, 2, 2, 2, 165, 166, 7, 112, 2, 2, 166, 167, 7, 119,
2, 2, 167, 168, 7, 110, 2, 2, 168, 169, 7, 110, 2, 2, 169, 58, 3, 2, 2,
2, 170, 171, 7, 94, 2, 2, 171, 60, 3, 2, 2, 2, 172, 173, 9, 2, 2, 2, 173,
62, 3, 2, 2, 2, 174, 175, 4, 50, 59, 2, 175, 64, 3, 2, 2, 2, 176, 178,
9, 3, 2, 2, 177, 179, 9, 4, 2, 2, 178, 177, 3, 2, 2, 2, 178, 179, 3, 2,
2, 2, 179, 181, 3, 2, 2, 2, 180, 182, 5, 63, 32, 2, 181, 180, 3, 2, 2,
2, 182, 183, 3, 2, 2, 2, 183, 181, 3, 2, 2, 2, 183, 184, 3, 2, 2, 2, 184,
66, 3, 2, 2, 2, 185, 186, 9, 5, 2, 2, 186, 68, 3, 2, 2, 2, 187, 188, 9,
6, 2, 2, 188, 70, 3, 2, 2, 2, 189, 194, 5, 73, 37, 2, 190, 194, 5, 77,
39, 2, 191, 194, 5, 79, 40, 2, 192, 194, 5, 75, 38, 2, 193, 189, 3, 2,
2, 2, 193, 190, 3, 2, 2, 2, 193, 191, 3, 2, 2, 2, 193, 192, 3, 2, 2, 2,
194, 72, 3, 2, 2, 2, 195, 196, 5, 59, 30, 2, 196, 197, 9, 7, 2, 2, 197,
74, 3, 2, 2, 2, 198, 199, 5, 59, 30, 2, 199, 200, 4, 50, 53, 2, 200, 201,
4, 50, 57, 2, 201, 202, 4, 50, 57, 2, 202, 76, 3, 2, 2, 2, 203, 204, 5,
59, 30, 2, 204, 205, 9, 8, 2, 2, 205, 206, 5, 67, 34, 2, 206, 207, 5, 67,
34, 2, 207, 78, 3, 2, 2, 2, 208, 209, 5, 59, 30, 2, 209, 210, 7, 119, 2,
2, 210, 211, 5, 67, 34, 2, 211, 212, 5, 67, 34, 2, 212, 213, 5, 67, 34,
2, 213, 214, 5, 67, 34, 2, 214, 227, 3, 2, 2, 2, 215, 216, 5, 59, 30, 2,
216, 217, 7, 87, 2, 2, 217, 218, 5, 67, 34, 2, 218, 219, 5, 67, 34, 2,
219, 220, 5, 67, 34, 2, 220, 221, 5, 67, 34, 2, 221, 222, 5, 67, 34, 2,
222, 223, 5, 67, 34, 2, 223, 224, 5, 67, 34, 2, 224, 225, 5, 67, 34, 2,
225, 227, 3, 2, 2, 2, 226, 208, 3, 2, 2, 2, 226, 215, 3, 2, 2, 2, 227,
80, 3, 2, 2, 2, 228, 230, 9, 9, 2, 2, 229, 228, 3, 2, 2, 2, 230, 231, 3,
2, 2, 2, 231, 229, 3, 2, 2, 2, 231, 232, 3, 2, 2, 2, 232, 233, 3, 2, 2,
2, 233, 234, 8, 41, 2, 2, 234, 82, 3, 2, 2, 2, 235, 236, 7, 49, 2, 2, 236,
237, 7, 49, 2, 2, 237, 241, 3, 2, 2, 2, 238, 240, 10, 10, 2, 2, 239, 238,
3, 2, 2, 2, 240, 243, 3, 2, 2, 2, 241, 239, 3, 2, 2, 2, 241, 242, 3, 2,
2, 2, 242, 244, 3, 2, 2, 2, 243, 241, 3, 2, 2, 2, 244, 245, 8, 42, 2, 2,
245, 84, 3, 2, 2, 2, 246, 248, 5, 63, 32, 2, 247, 246, 3, 2, 2, 2, 248,
249, 3, 2, 2, 2, 249, 247, 3, 2, 2, 2, 249, 250, 3, 2, 2, 2, 250, 251,
3, 2, 2, 2, 251, 253, 7, 48, 2, 2, 252, 254, 5, 63, 32, 2, 253, 252, 3,
2, 2, 2, 254, 255, 3, 2, 2, 2, 255, 253, 3, 2, 2, 2, 255, 256, 3, 2, 2,
2, 256, 258, 3, 2, 2, 2, 257, 259, 5, 65, 33, 2, 258, 257, 3, 2, 2, 2,
258, 259, 3, 2, 2, 2, 259, 277, 3, 2, 2, 2, 260, 262, 5, 63, 32, 2, 261,
260, 3, 2, 2, 2, 262, 263, 3, 2, 2, 2, 263, 261, 3, 2, 2, 2, 263, 264,
3, 2, 2, 2, 264, 265, 3, 2, 2, 2, 265, 266, 5, 65, 33, 2, 266, 277, 3,
2, 2, 2, 267, 269, 7, 48, 2, 2, 268, 270, 5, 63, 32, 2, 269, 268, 3, 2,
2, 2, 270, 271, 3, 2, 2, 2, 271, 269, 3, 2, 2, 2, 271, 272, 3, 2, 2, 2,
272, 274, 3, 2, 2, 2, 273, 275, 5, 65, 33, 2, 274, 273, 3, 2, 2, 2, 274,
275, 3, 2, 2, 2, 275, 277, 3, 2, 2, 2, 276, 247, 3, 2, 2, 2, 276, 261,
3, 2, 2, 2, 276, 267, 3, 2, 2, 2, 277, 86, 3, 2, 2, 2, 278, 280, 5, 63,
32, 2, 279, 278, 3, 2, 2, 2, 280, 281, 3, 2, 2, 2, 281, 279, 3, 2, 2, 2,
281, 282, 3, 2, 2, 2, 282, 292, 3, 2, 2, 2, 283, 284, 7, 50, 2, 2, 284,
285, 7, 122, 2, 2, 285, 287, 3, 2, 2, 2, 286, 288, 5, 67, 34, 2, 287, 286,
3, 2, 2, 2, 288, 289, 3, 2, 2, 2, 289, 287, 3, 2, 2, 2, 289, 290, 3, 2,
2, 2, 290, 292, 3, 2, 2, 2, 291, 279, 3, 2, 2, 2, 291, 283, 3, 2, 2, 2,
292, 88, 3, 2, 2, 2, 293, 295, 5, 63, 32, 2, 294, 293, 3, 2, 2, 2, 295,
296, 3, 2, 2, 2, 296, 294, 3, 2, 2, 2, 296, 297, 3, 2, 2, 2, 297, 298,
3, 2, 2, 2, 298, 299, 9, 11, 2, 2, 299, 311, 3, 2, 2, 2, 300, 301, 7, 50,
2, 2, 301, 302, 7, 122, 2, 2, 302, 304, 3, 2, 2, 2, 303, 305, 5, 67, 34,
2, 304, 303, 3, 2, 2, 2, 305, 306, 3, 2, 2, 2, 306, 304, 3, 2, 2, 2, 306,
307, 3, 2, 2, 2, 307, 308, 3, 2, 2, 2, 308, 309, 9, 11, 2, 2, 309, 311,
3, 2, 2, 2, 310, 294, 3, 2, 2, 2, 310, 300, 3, 2, 2, 2, 311, 90, 3, 2,
2, 2, 312, 317, 7, 36, 2, 2, 313, 316, 5, 71, 36, 2, 314, 316, 10, 12,
2, 2, 315, 313, 3, 2, 2, 2, 315, 314, 3, 2, 2, 2, 316, 319, 3, 2, 2, 2,
317, 315, 3, 2, 2, 2, 317, 318, 3, 2, 2, 2, 318, 320, 3, 2, 2, 2, 319,
317, 3, 2, 2, 2, 320, 409, 7, 36, 2, 2, 321, 326, 7, 41, 2, 2, 322, 325,
5, 71, 36, 2, 323, 325, 10, 13, 2, 2, 324, 322, 3, 2, 2, 2, 324, 323, 3,
2, 2, 2, 325, 328, 3, 2, 2, 2, 326, 324, 3, 2, 2, 2, 326, 327, 3, 2, 2,
2, 327, 329, 3, 2, 2, 2, 328, 326, 3, 2, 2, 2, 329, 409, 7, 41, 2, 2, 330,
331, 7, 36, 2, 2, 331, 332, 7, 36, 2, 2, 332, 333, 7, 36, 2, 2, 333, 338,
3, 2, 2, 2, 334, 337, 5, 71, 36, 2, 335, 337, 10, 14, 2, 2, 336, 334, 3,
2, 2, 2, 336, 335, 3, 2, 2, 2, 337, 340, 3, 2, 2, 2, 338, 339, 3, 2, 2,
2, 338, 336, 3, 2, 2, 2, 339, 341, 3, 2, 2, 2, 340, 338, 3, 2, 2, 2, 341,
342, 7, 36, 2, 2, 342, 343, 7, 36, 2, 2, 343, 409, 7, 36, 2, 2, 344, 345,
7, 41, 2, 2, 345, 346, 7, 41, 2, 2, 346, 347, 7, 41, 2, 2, 347, 352, 3,
2, 2, 2, 348, 351, 5, 71, 36, 2, 349, 351, 10, 14, 2, 2, 350, 348, 3, 2,
2, 2, 350, 349, 3, 2, 2, 2, 351, 354, 3, 2, 2, 2, 352, 353, 3, 2, 2, 2,
352, 350, 3, 2, 2, 2, 353, 355, 3, 2, 2, 2, 354, 352, 3, 2, 2, 2, 355,
356, 7, 41, 2, 2, 356, 357, 7, 41, 2, 2, 357, 409, 7, 41, 2, 2, 358, 359,
5, 69, 35, 2, 359, 363, 7, 36, 2, 2, 360, 362, 10, 15, 2, 2, 361, 360,
3, 2, 2, 2, 362, 365, 3, 2, 2, 2, 363, 361, 3, 2, 2, 2, 363, 364, 3, 2,
2, 2, 364, 366, 3, 2, 2, 2, 365, 363, 3, 2, 2, 2, 366, 367, 7, 36, 2, 2,
367, 409, 3, 2, 2, 2, 368, 369, 5, 69, 35, 2, 369, 373, 7, 41, 2, 2, 370,
372, 10, 16, 2, 2, 371, 370, 3, 2, 2, 2, 372, 375, 3, 2, 2, 2, 373, 371,
3, 2, 2, 2, 373, 374, 3, 2, 2, 2, 374, 376, 3, 2, 2, 2, 375, 373, 3, 2,
2, 2, 376, 377, 7, 41, 2, 2, 377, 409, 3, 2, 2, 2, 378, 379, 5, 69, 35,
2, 379, 380, 7, 36, 2, 2, 380, 381, 7, 36, 2, 2, 381, 382, 7, 36, 2, 2,
382, 386, 3, 2, 2, 2, 383, 385, 11, 2, 2, 2, 384, 383, 3, 2, 2, 2, 385,
388, 3, 2, 2, 2, 386, 387, 3, 2, 2, 2, 386, 384, 3, 2, 2, 2, 387, 389,
3, 2, 2, 2, 388, 386, 3, 2, 2, 2, 389, 390, 7, 36, 2, 2, 390, 391, 7, 36,
2, 2, 391, 392, 7, 36, 2, 2, 392, 409, 3, 2, 2, 2, 393, 394, 5, 69, 35,
2, 394, 395, 7, 41, 2, 2, 395, 396, 7, 41, 2, 2, 396, 397, 7, 41, 2, 2,
397, 401, 3, 2, 2, 2, 398, 400, 11, 2, 2, 2, 399, 398, 3, 2, 2, 2, 400,
403, 3, 2, 2, 2, 401, 402, 3, 2, 2, 2, 401, 399, 3, 2, 2, 2, 402, 404,
3, 2, 2, 2, 403, 401, 3, 2, 2, 2, 404, 405, 7, 41, 2, 2, 405, 406, 7, 41,
2, 2, 406, 407, 7, 41, 2, 2, 407, 409, 3, 2, 2, 2, 408, 312, 3, 2, 2, 2,
408, 321, 3, 2, 2, 2, 408, 330, 3, 2, 2, 2, 408, 344, 3, 2, 2, 2, 408,
358, 3, 2, 2, 2, 408, 368, 3, 2, 2, 2, 408, 378, 3, 2, 2, 2, 408, 393,
3, 2, 2, 2, 409, 92, 3, 2, 2, 2, 410, 411, 9, 17, 2, 2, 411, 412, 5, 91,
46, 2, 412, 94, 3, 2, 2, 2, 413, 416, 5, 61, 31, 2, 414, 416, 7, 97, 2,
2, 415, 413, 3, 2, 2, 2, 415, 414, 3, 2, 2, 2, 416, 422, 3, 2, 2, 2, 417,
421, 5, 61, 31, 2, 418, 421, 5, 63, 32, 2, 419, 421, 7, 97, 2, 2, 420,
417, 3, 2, 2, 2, 420, 418, 3, 2, 2, 2, 420, 419, 3, 2, 2, 2, 421, 424,
3, 2, 2, 2, 422, 420, 3, 2, 2, 2, 422, 423, 3, 2, 2, 2, 423, 96, 3, 2,
2, 2, 424, 422, 3, 2, 2, 2, 38, 2, 178, 183, 193, 226, 231, 241, 249, 255,
258, 263, 271, 274, 276, 281, 289, 291, 296, 306, 310, 315, 317, 324, 326,
336, 338, 350, 352, 363, 373, 386, 401, 408, 415, 420, 422, 3, 2, 3, 2,
}
var lexerChannelNames = []string{
"DEFAULT_TOKEN_CHANNEL", "HIDDEN",
}
var lexerModeNames = []string{
"DEFAULT_MODE",
}
var lexerLiteralNames = []string{
"", "'=='", "'!='", "'in'", "'<'", "'<='", "'>='", "'>'", "'&&'", "'||'",
"'['", "']'", "'{'", "'}'", "'('", "')'", "'.'", "','", "'-'", "'!'", "'?'",
"':'", "'+'", "'*'", "'/'", "'%'", "'true'", "'false'", "'null'",
}
var lexerSymbolicNames = []string{
"", "EQUALS", "NOT_EQUALS", "IN", "LESS", "LESS_EQUALS", "GREATER_EQUALS",
"GREATER", "LOGICAL_AND", "LOGICAL_OR", "LBRACKET", "RPRACKET", "LBRACE",
"RBRACE", "LPAREN", "RPAREN", "DOT", "COMMA", "MINUS", "EXCLAM", "QUESTIONMARK",
"COLON", "PLUS", "STAR", "SLASH", "PERCENT", "CEL_TRUE", "CEL_FALSE", "NUL",
"WHITESPACE", "COMMENT", "NUM_FLOAT", "NUM_INT", "NUM_UINT", "STRING",
"BYTES", "IDENTIFIER",
}
var lexerRuleNames = []string{
"EQUALS", "NOT_EQUALS", "IN", "LESS", "LESS_EQUALS", "GREATER_EQUALS",
"GREATER", "LOGICAL_AND", "LOGICAL_OR", "LBRACKET", "RPRACKET", "LBRACE",
"RBRACE", "LPAREN", "RPAREN", "DOT", "COMMA", "MINUS", "EXCLAM", "QUESTIONMARK",
"COLON", "PLUS", "STAR", "SLASH", "PERCENT", "CEL_TRUE", "CEL_FALSE", "NUL",
"BACKSLASH", "LETTER", "DIGIT", "EXPONENT", "HEXDIGIT", "RAW", "ESC_SEQ",
"ESC_CHAR_SEQ", "ESC_OCT_SEQ", "ESC_BYTE_SEQ", "ESC_UNI_SEQ", "WHITESPACE",
"COMMENT", "NUM_FLOAT", "NUM_INT", "NUM_UINT", "STRING", "BYTES", "IDENTIFIER",
}
type CELLexer struct {
*antlr.BaseLexer
channelNames []string
@@ -255,28 +22,282 @@ type CELLexer struct {
// TODO: EOF string
}
// NewCELLexer produces a new lexer instance for the optional input antlr.CharStream.
//
// The *CELLexer instance produced may be reused by calling the SetInputStream method.
// The initial lexer configuration is expensive to construct, and the object is not thread-safe;
// however, if used within a Golang sync.Pool, the construction cost amortizes well and the
// objects can be used in a thread-safe manner.
func NewCELLexer(input antlr.CharStream) *CELLexer {
l := new(CELLexer)
lexerDeserializer := antlr.NewATNDeserializer(nil)
lexerAtn := lexerDeserializer.DeserializeFromUInt16(serializedLexerAtn)
lexerDecisionToDFA := make([]*antlr.DFA, len(lexerAtn.DecisionToState))
for index, ds := range lexerAtn.DecisionToState {
lexerDecisionToDFA[index] = antlr.NewDFA(ds, index)
}
l.BaseLexer = antlr.NewBaseLexer(input)
l.Interpreter = antlr.NewLexerATNSimulator(l, lexerAtn, lexerDecisionToDFA, antlr.NewPredictionContextCache())
var cellexerLexerStaticData struct {
once sync.Once
serializedATN []int32
channelNames []string
modeNames []string
literalNames []string
symbolicNames []string
ruleNames []string
predictionContextCache *antlr.PredictionContextCache
atn *antlr.ATN
decisionToDFA []*antlr.DFA
}
l.channelNames = lexerChannelNames
l.modeNames = lexerModeNames
l.RuleNames = lexerRuleNames
l.LiteralNames = lexerLiteralNames
l.SymbolicNames = lexerSymbolicNames
func cellexerLexerInit() {
staticData := &cellexerLexerStaticData
staticData.channelNames = []string{
"DEFAULT_TOKEN_CHANNEL", "HIDDEN",
}
staticData.modeNames = []string{
"DEFAULT_MODE",
}
staticData.literalNames = []string{
"", "'=='", "'!='", "'in'", "'<'", "'<='", "'>='", "'>'", "'&&'", "'||'",
"'['", "']'", "'{'", "'}'", "'('", "')'", "'.'", "','", "'-'", "'!'",
"'?'", "':'", "'+'", "'*'", "'/'", "'%'", "'true'", "'false'", "'null'",
}
staticData.symbolicNames = []string{
"", "EQUALS", "NOT_EQUALS", "IN", "LESS", "LESS_EQUALS", "GREATER_EQUALS",
"GREATER", "LOGICAL_AND", "LOGICAL_OR", "LBRACKET", "RPRACKET", "LBRACE",
"RBRACE", "LPAREN", "RPAREN", "DOT", "COMMA", "MINUS", "EXCLAM", "QUESTIONMARK",
"COLON", "PLUS", "STAR", "SLASH", "PERCENT", "CEL_TRUE", "CEL_FALSE",
"NUL", "WHITESPACE", "COMMENT", "NUM_FLOAT", "NUM_INT", "NUM_UINT",
"STRING", "BYTES", "IDENTIFIER",
}
staticData.ruleNames = []string{
"EQUALS", "NOT_EQUALS", "IN", "LESS", "LESS_EQUALS", "GREATER_EQUALS",
"GREATER", "LOGICAL_AND", "LOGICAL_OR", "LBRACKET", "RPRACKET", "LBRACE",
"RBRACE", "LPAREN", "RPAREN", "DOT", "COMMA", "MINUS", "EXCLAM", "QUESTIONMARK",
"COLON", "PLUS", "STAR", "SLASH", "PERCENT", "CEL_TRUE", "CEL_FALSE",
"NUL", "BACKSLASH", "LETTER", "DIGIT", "EXPONENT", "HEXDIGIT", "RAW",
"ESC_SEQ", "ESC_CHAR_SEQ", "ESC_OCT_SEQ", "ESC_BYTE_SEQ", "ESC_UNI_SEQ",
"WHITESPACE", "COMMENT", "NUM_FLOAT", "NUM_INT", "NUM_UINT", "STRING",
"BYTES", "IDENTIFIER",
}
staticData.predictionContextCache = antlr.NewPredictionContextCache()
staticData.serializedATN = []int32{
4, 0, 36, 423, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2,
10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15,
7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7,
20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25,
2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2,
31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36,
7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7,
41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46,
1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4,
1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8,
1, 8, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13,
1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 18, 1, 18, 1,
19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 24,
1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1,
26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29,
1, 30, 1, 30, 1, 31, 1, 31, 3, 31, 177, 8, 31, 1, 31, 4, 31, 180, 8, 31,
11, 31, 12, 31, 181, 1, 32, 1, 32, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1,
34, 3, 34, 192, 8, 34, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36,
1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1,
38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38,
1, 38, 1, 38, 1, 38, 3, 38, 225, 8, 38, 1, 39, 4, 39, 228, 8, 39, 11, 39,
12, 39, 229, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 5, 40, 238, 8, 40,
10, 40, 12, 40, 241, 9, 40, 1, 40, 1, 40, 1, 41, 4, 41, 246, 8, 41, 11,
41, 12, 41, 247, 1, 41, 1, 41, 4, 41, 252, 8, 41, 11, 41, 12, 41, 253,
1, 41, 3, 41, 257, 8, 41, 1, 41, 4, 41, 260, 8, 41, 11, 41, 12, 41, 261,
1, 41, 1, 41, 1, 41, 1, 41, 4, 41, 268, 8, 41, 11, 41, 12, 41, 269, 1,
41, 3, 41, 273, 8, 41, 3, 41, 275, 8, 41, 1, 42, 4, 42, 278, 8, 42, 11,
42, 12, 42, 279, 1, 42, 1, 42, 1, 42, 1, 42, 4, 42, 286, 8, 42, 11, 42,
12, 42, 287, 3, 42, 290, 8, 42, 1, 43, 4, 43, 293, 8, 43, 11, 43, 12, 43,
294, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 4, 43, 303, 8, 43, 11, 43,
12, 43, 304, 1, 43, 1, 43, 3, 43, 309, 8, 43, 1, 44, 1, 44, 1, 44, 5, 44,
314, 8, 44, 10, 44, 12, 44, 317, 9, 44, 1, 44, 1, 44, 1, 44, 1, 44, 5,
44, 323, 8, 44, 10, 44, 12, 44, 326, 9, 44, 1, 44, 1, 44, 1, 44, 1, 44,
1, 44, 1, 44, 1, 44, 5, 44, 335, 8, 44, 10, 44, 12, 44, 338, 9, 44, 1,
44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 5, 44, 349,
8, 44, 10, 44, 12, 44, 352, 9, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1,
44, 5, 44, 360, 8, 44, 10, 44, 12, 44, 363, 9, 44, 1, 44, 1, 44, 1, 44,
1, 44, 1, 44, 5, 44, 370, 8, 44, 10, 44, 12, 44, 373, 9, 44, 1, 44, 1,
44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 5, 44, 383, 8, 44, 10, 44,
12, 44, 386, 9, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1,
44, 1, 44, 1, 44, 5, 44, 398, 8, 44, 10, 44, 12, 44, 401, 9, 44, 1, 44,
1, 44, 1, 44, 1, 44, 3, 44, 407, 8, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1,
46, 3, 46, 414, 8, 46, 1, 46, 1, 46, 1, 46, 5, 46, 419, 8, 46, 10, 46,
12, 46, 422, 9, 46, 4, 336, 350, 384, 399, 0, 47, 1, 1, 3, 2, 5, 3, 7,
4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27,
14, 29, 15, 31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45,
23, 47, 24, 49, 25, 51, 26, 53, 27, 55, 28, 57, 0, 59, 0, 61, 0, 63, 0,
65, 0, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 29, 81, 30, 83, 31,
85, 32, 87, 33, 89, 34, 91, 35, 93, 36, 1, 0, 16, 2, 0, 65, 90, 97, 122,
2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 3, 0, 48, 57, 65, 70, 97,
102, 2, 0, 82, 82, 114, 114, 10, 0, 34, 34, 39, 39, 63, 63, 92, 92, 96,
98, 102, 102, 110, 110, 114, 114, 116, 116, 118, 118, 2, 0, 88, 88, 120,
120, 3, 0, 9, 10, 12, 13, 32, 32, 1, 0, 10, 10, 2, 0, 85, 85, 117, 117,
4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 4, 0, 10, 10, 13, 13, 39, 39, 92,
92, 1, 0, 92, 92, 3, 0, 10, 10, 13, 13, 34, 34, 3, 0, 10, 10, 13, 13, 39,
39, 2, 0, 66, 66, 98, 98, 456, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5,
1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13,
1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0,
21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0,
0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0,
0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0,
0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1,
0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 79, 1, 0, 0, 0, 0, 81,
1, 0, 0, 0, 0, 83, 1, 0, 0, 0, 0, 85, 1, 0, 0, 0, 0, 87, 1, 0, 0, 0, 0,
89, 1, 0, 0, 0, 0, 91, 1, 0, 0, 0, 0, 93, 1, 0, 0, 0, 1, 95, 1, 0, 0, 0,
3, 98, 1, 0, 0, 0, 5, 101, 1, 0, 0, 0, 7, 104, 1, 0, 0, 0, 9, 106, 1, 0,
0, 0, 11, 109, 1, 0, 0, 0, 13, 112, 1, 0, 0, 0, 15, 114, 1, 0, 0, 0, 17,
117, 1, 0, 0, 0, 19, 120, 1, 0, 0, 0, 21, 122, 1, 0, 0, 0, 23, 124, 1,
0, 0, 0, 25, 126, 1, 0, 0, 0, 27, 128, 1, 0, 0, 0, 29, 130, 1, 0, 0, 0,
31, 132, 1, 0, 0, 0, 33, 134, 1, 0, 0, 0, 35, 136, 1, 0, 0, 0, 37, 138,
1, 0, 0, 0, 39, 140, 1, 0, 0, 0, 41, 142, 1, 0, 0, 0, 43, 144, 1, 0, 0,
0, 45, 146, 1, 0, 0, 0, 47, 148, 1, 0, 0, 0, 49, 150, 1, 0, 0, 0, 51, 152,
1, 0, 0, 0, 53, 157, 1, 0, 0, 0, 55, 163, 1, 0, 0, 0, 57, 168, 1, 0, 0,
0, 59, 170, 1, 0, 0, 0, 61, 172, 1, 0, 0, 0, 63, 174, 1, 0, 0, 0, 65, 183,
1, 0, 0, 0, 67, 185, 1, 0, 0, 0, 69, 191, 1, 0, 0, 0, 71, 193, 1, 0, 0,
0, 73, 196, 1, 0, 0, 0, 75, 201, 1, 0, 0, 0, 77, 224, 1, 0, 0, 0, 79, 227,
1, 0, 0, 0, 81, 233, 1, 0, 0, 0, 83, 274, 1, 0, 0, 0, 85, 289, 1, 0, 0,
0, 87, 308, 1, 0, 0, 0, 89, 406, 1, 0, 0, 0, 91, 408, 1, 0, 0, 0, 93, 413,
1, 0, 0, 0, 95, 96, 5, 61, 0, 0, 96, 97, 5, 61, 0, 0, 97, 2, 1, 0, 0, 0,
98, 99, 5, 33, 0, 0, 99, 100, 5, 61, 0, 0, 100, 4, 1, 0, 0, 0, 101, 102,
5, 105, 0, 0, 102, 103, 5, 110, 0, 0, 103, 6, 1, 0, 0, 0, 104, 105, 5,
60, 0, 0, 105, 8, 1, 0, 0, 0, 106, 107, 5, 60, 0, 0, 107, 108, 5, 61, 0,
0, 108, 10, 1, 0, 0, 0, 109, 110, 5, 62, 0, 0, 110, 111, 5, 61, 0, 0, 111,
12, 1, 0, 0, 0, 112, 113, 5, 62, 0, 0, 113, 14, 1, 0, 0, 0, 114, 115, 5,
38, 0, 0, 115, 116, 5, 38, 0, 0, 116, 16, 1, 0, 0, 0, 117, 118, 5, 124,
0, 0, 118, 119, 5, 124, 0, 0, 119, 18, 1, 0, 0, 0, 120, 121, 5, 91, 0,
0, 121, 20, 1, 0, 0, 0, 122, 123, 5, 93, 0, 0, 123, 22, 1, 0, 0, 0, 124,
125, 5, 123, 0, 0, 125, 24, 1, 0, 0, 0, 126, 127, 5, 125, 0, 0, 127, 26,
1, 0, 0, 0, 128, 129, 5, 40, 0, 0, 129, 28, 1, 0, 0, 0, 130, 131, 5, 41,
0, 0, 131, 30, 1, 0, 0, 0, 132, 133, 5, 46, 0, 0, 133, 32, 1, 0, 0, 0,
134, 135, 5, 44, 0, 0, 135, 34, 1, 0, 0, 0, 136, 137, 5, 45, 0, 0, 137,
36, 1, 0, 0, 0, 138, 139, 5, 33, 0, 0, 139, 38, 1, 0, 0, 0, 140, 141, 5,
63, 0, 0, 141, 40, 1, 0, 0, 0, 142, 143, 5, 58, 0, 0, 143, 42, 1, 0, 0,
0, 144, 145, 5, 43, 0, 0, 145, 44, 1, 0, 0, 0, 146, 147, 5, 42, 0, 0, 147,
46, 1, 0, 0, 0, 148, 149, 5, 47, 0, 0, 149, 48, 1, 0, 0, 0, 150, 151, 5,
37, 0, 0, 151, 50, 1, 0, 0, 0, 152, 153, 5, 116, 0, 0, 153, 154, 5, 114,
0, 0, 154, 155, 5, 117, 0, 0, 155, 156, 5, 101, 0, 0, 156, 52, 1, 0, 0,
0, 157, 158, 5, 102, 0, 0, 158, 159, 5, 97, 0, 0, 159, 160, 5, 108, 0,
0, 160, 161, 5, 115, 0, 0, 161, 162, 5, 101, 0, 0, 162, 54, 1, 0, 0, 0,
163, 164, 5, 110, 0, 0, 164, 165, 5, 117, 0, 0, 165, 166, 5, 108, 0, 0,
166, 167, 5, 108, 0, 0, 167, 56, 1, 0, 0, 0, 168, 169, 5, 92, 0, 0, 169,
58, 1, 0, 0, 0, 170, 171, 7, 0, 0, 0, 171, 60, 1, 0, 0, 0, 172, 173, 2,
48, 57, 0, 173, 62, 1, 0, 0, 0, 174, 176, 7, 1, 0, 0, 175, 177, 7, 2, 0,
0, 176, 175, 1, 0, 0, 0, 176, 177, 1, 0, 0, 0, 177, 179, 1, 0, 0, 0, 178,
180, 3, 61, 30, 0, 179, 178, 1, 0, 0, 0, 180, 181, 1, 0, 0, 0, 181, 179,
1, 0, 0, 0, 181, 182, 1, 0, 0, 0, 182, 64, 1, 0, 0, 0, 183, 184, 7, 3,
0, 0, 184, 66, 1, 0, 0, 0, 185, 186, 7, 4, 0, 0, 186, 68, 1, 0, 0, 0, 187,
192, 3, 71, 35, 0, 188, 192, 3, 75, 37, 0, 189, 192, 3, 77, 38, 0, 190,
192, 3, 73, 36, 0, 191, 187, 1, 0, 0, 0, 191, 188, 1, 0, 0, 0, 191, 189,
1, 0, 0, 0, 191, 190, 1, 0, 0, 0, 192, 70, 1, 0, 0, 0, 193, 194, 3, 57,
28, 0, 194, 195, 7, 5, 0, 0, 195, 72, 1, 0, 0, 0, 196, 197, 3, 57, 28,
0, 197, 198, 2, 48, 51, 0, 198, 199, 2, 48, 55, 0, 199, 200, 2, 48, 55,
0, 200, 74, 1, 0, 0, 0, 201, 202, 3, 57, 28, 0, 202, 203, 7, 6, 0, 0, 203,
204, 3, 65, 32, 0, 204, 205, 3, 65, 32, 0, 205, 76, 1, 0, 0, 0, 206, 207,
3, 57, 28, 0, 207, 208, 5, 117, 0, 0, 208, 209, 3, 65, 32, 0, 209, 210,
3, 65, 32, 0, 210, 211, 3, 65, 32, 0, 211, 212, 3, 65, 32, 0, 212, 225,
1, 0, 0, 0, 213, 214, 3, 57, 28, 0, 214, 215, 5, 85, 0, 0, 215, 216, 3,
65, 32, 0, 216, 217, 3, 65, 32, 0, 217, 218, 3, 65, 32, 0, 218, 219, 3,
65, 32, 0, 219, 220, 3, 65, 32, 0, 220, 221, 3, 65, 32, 0, 221, 222, 3,
65, 32, 0, 222, 223, 3, 65, 32, 0, 223, 225, 1, 0, 0, 0, 224, 206, 1, 0,
0, 0, 224, 213, 1, 0, 0, 0, 225, 78, 1, 0, 0, 0, 226, 228, 7, 7, 0, 0,
227, 226, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 227, 1, 0, 0, 0, 229,
230, 1, 0, 0, 0, 230, 231, 1, 0, 0, 0, 231, 232, 6, 39, 0, 0, 232, 80,
1, 0, 0, 0, 233, 234, 5, 47, 0, 0, 234, 235, 5, 47, 0, 0, 235, 239, 1,
0, 0, 0, 236, 238, 8, 8, 0, 0, 237, 236, 1, 0, 0, 0, 238, 241, 1, 0, 0,
0, 239, 237, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 242, 1, 0, 0, 0, 241,
239, 1, 0, 0, 0, 242, 243, 6, 40, 0, 0, 243, 82, 1, 0, 0, 0, 244, 246,
3, 61, 30, 0, 245, 244, 1, 0, 0, 0, 246, 247, 1, 0, 0, 0, 247, 245, 1,
0, 0, 0, 247, 248, 1, 0, 0, 0, 248, 249, 1, 0, 0, 0, 249, 251, 5, 46, 0,
0, 250, 252, 3, 61, 30, 0, 251, 250, 1, 0, 0, 0, 252, 253, 1, 0, 0, 0,
253, 251, 1, 0, 0, 0, 253, 254, 1, 0, 0, 0, 254, 256, 1, 0, 0, 0, 255,
257, 3, 63, 31, 0, 256, 255, 1, 0, 0, 0, 256, 257, 1, 0, 0, 0, 257, 275,
1, 0, 0, 0, 258, 260, 3, 61, 30, 0, 259, 258, 1, 0, 0, 0, 260, 261, 1,
0, 0, 0, 261, 259, 1, 0, 0, 0, 261, 262, 1, 0, 0, 0, 262, 263, 1, 0, 0,
0, 263, 264, 3, 63, 31, 0, 264, 275, 1, 0, 0, 0, 265, 267, 5, 46, 0, 0,
266, 268, 3, 61, 30, 0, 267, 266, 1, 0, 0, 0, 268, 269, 1, 0, 0, 0, 269,
267, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 272, 1, 0, 0, 0, 271, 273,
3, 63, 31, 0, 272, 271, 1, 0, 0, 0, 272, 273, 1, 0, 0, 0, 273, 275, 1,
0, 0, 0, 274, 245, 1, 0, 0, 0, 274, 259, 1, 0, 0, 0, 274, 265, 1, 0, 0,
0, 275, 84, 1, 0, 0, 0, 276, 278, 3, 61, 30, 0, 277, 276, 1, 0, 0, 0, 278,
279, 1, 0, 0, 0, 279, 277, 1, 0, 0, 0, 279, 280, 1, 0, 0, 0, 280, 290,
1, 0, 0, 0, 281, 282, 5, 48, 0, 0, 282, 283, 5, 120, 0, 0, 283, 285, 1,
0, 0, 0, 284, 286, 3, 65, 32, 0, 285, 284, 1, 0, 0, 0, 286, 287, 1, 0,
0, 0, 287, 285, 1, 0, 0, 0, 287, 288, 1, 0, 0, 0, 288, 290, 1, 0, 0, 0,
289, 277, 1, 0, 0, 0, 289, 281, 1, 0, 0, 0, 290, 86, 1, 0, 0, 0, 291, 293,
3, 61, 30, 0, 292, 291, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 292, 1,
0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 296, 1, 0, 0, 0, 296, 297, 7, 9, 0,
0, 297, 309, 1, 0, 0, 0, 298, 299, 5, 48, 0, 0, 299, 300, 5, 120, 0, 0,
300, 302, 1, 0, 0, 0, 301, 303, 3, 65, 32, 0, 302, 301, 1, 0, 0, 0, 303,
304, 1, 0, 0, 0, 304, 302, 1, 0, 0, 0, 304, 305, 1, 0, 0, 0, 305, 306,
1, 0, 0, 0, 306, 307, 7, 9, 0, 0, 307, 309, 1, 0, 0, 0, 308, 292, 1, 0,
0, 0, 308, 298, 1, 0, 0, 0, 309, 88, 1, 0, 0, 0, 310, 315, 5, 34, 0, 0,
311, 314, 3, 69, 34, 0, 312, 314, 8, 10, 0, 0, 313, 311, 1, 0, 0, 0, 313,
312, 1, 0, 0, 0, 314, 317, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 315, 316,
1, 0, 0, 0, 316, 318, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 318, 407, 5, 34,
0, 0, 319, 324, 5, 39, 0, 0, 320, 323, 3, 69, 34, 0, 321, 323, 8, 11, 0,
0, 322, 320, 1, 0, 0, 0, 322, 321, 1, 0, 0, 0, 323, 326, 1, 0, 0, 0, 324,
322, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 327, 1, 0, 0, 0, 326, 324,
1, 0, 0, 0, 327, 407, 5, 39, 0, 0, 328, 329, 5, 34, 0, 0, 329, 330, 5,
34, 0, 0, 330, 331, 5, 34, 0, 0, 331, 336, 1, 0, 0, 0, 332, 335, 3, 69,
34, 0, 333, 335, 8, 12, 0, 0, 334, 332, 1, 0, 0, 0, 334, 333, 1, 0, 0,
0, 335, 338, 1, 0, 0, 0, 336, 337, 1, 0, 0, 0, 336, 334, 1, 0, 0, 0, 337,
339, 1, 0, 0, 0, 338, 336, 1, 0, 0, 0, 339, 340, 5, 34, 0, 0, 340, 341,
5, 34, 0, 0, 341, 407, 5, 34, 0, 0, 342, 343, 5, 39, 0, 0, 343, 344, 5,
39, 0, 0, 344, 345, 5, 39, 0, 0, 345, 350, 1, 0, 0, 0, 346, 349, 3, 69,
34, 0, 347, 349, 8, 12, 0, 0, 348, 346, 1, 0, 0, 0, 348, 347, 1, 0, 0,
0, 349, 352, 1, 0, 0, 0, 350, 351, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 351,
353, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0, 353, 354, 5, 39, 0, 0, 354, 355,
5, 39, 0, 0, 355, 407, 5, 39, 0, 0, 356, 357, 3, 67, 33, 0, 357, 361, 5,
34, 0, 0, 358, 360, 8, 13, 0, 0, 359, 358, 1, 0, 0, 0, 360, 363, 1, 0,
0, 0, 361, 359, 1, 0, 0, 0, 361, 362, 1, 0, 0, 0, 362, 364, 1, 0, 0, 0,
363, 361, 1, 0, 0, 0, 364, 365, 5, 34, 0, 0, 365, 407, 1, 0, 0, 0, 366,
367, 3, 67, 33, 0, 367, 371, 5, 39, 0, 0, 368, 370, 8, 14, 0, 0, 369, 368,
1, 0, 0, 0, 370, 373, 1, 0, 0, 0, 371, 369, 1, 0, 0, 0, 371, 372, 1, 0,
0, 0, 372, 374, 1, 0, 0, 0, 373, 371, 1, 0, 0, 0, 374, 375, 5, 39, 0, 0,
375, 407, 1, 0, 0, 0, 376, 377, 3, 67, 33, 0, 377, 378, 5, 34, 0, 0, 378,
379, 5, 34, 0, 0, 379, 380, 5, 34, 0, 0, 380, 384, 1, 0, 0, 0, 381, 383,
9, 0, 0, 0, 382, 381, 1, 0, 0, 0, 383, 386, 1, 0, 0, 0, 384, 385, 1, 0,
0, 0, 384, 382, 1, 0, 0, 0, 385, 387, 1, 0, 0, 0, 386, 384, 1, 0, 0, 0,
387, 388, 5, 34, 0, 0, 388, 389, 5, 34, 0, 0, 389, 390, 5, 34, 0, 0, 390,
407, 1, 0, 0, 0, 391, 392, 3, 67, 33, 0, 392, 393, 5, 39, 0, 0, 393, 394,
5, 39, 0, 0, 394, 395, 5, 39, 0, 0, 395, 399, 1, 0, 0, 0, 396, 398, 9,
0, 0, 0, 397, 396, 1, 0, 0, 0, 398, 401, 1, 0, 0, 0, 399, 400, 1, 0, 0,
0, 399, 397, 1, 0, 0, 0, 400, 402, 1, 0, 0, 0, 401, 399, 1, 0, 0, 0, 402,
403, 5, 39, 0, 0, 403, 404, 5, 39, 0, 0, 404, 405, 5, 39, 0, 0, 405, 407,
1, 0, 0, 0, 406, 310, 1, 0, 0, 0, 406, 319, 1, 0, 0, 0, 406, 328, 1, 0,
0, 0, 406, 342, 1, 0, 0, 0, 406, 356, 1, 0, 0, 0, 406, 366, 1, 0, 0, 0,
406, 376, 1, 0, 0, 0, 406, 391, 1, 0, 0, 0, 407, 90, 1, 0, 0, 0, 408, 409,
7, 15, 0, 0, 409, 410, 3, 89, 44, 0, 410, 92, 1, 0, 0, 0, 411, 414, 3,
59, 29, 0, 412, 414, 5, 95, 0, 0, 413, 411, 1, 0, 0, 0, 413, 412, 1, 0,
0, 0, 414, 420, 1, 0, 0, 0, 415, 419, 3, 59, 29, 0, 416, 419, 3, 61, 30,
0, 417, 419, 5, 95, 0, 0, 418, 415, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 418,
417, 1, 0, 0, 0, 419, 422, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 420, 421,
1, 0, 0, 0, 421, 94, 1, 0, 0, 0, 422, 420, 1, 0, 0, 0, 36, 0, 176, 181,
191, 224, 229, 239, 247, 253, 256, 261, 269, 272, 274, 279, 287, 289, 294,
304, 308, 313, 315, 322, 324, 334, 336, 348, 350, 361, 371, 384, 399, 406,
413, 418, 420, 1, 0, 1, 0,
}
deserializer := antlr.NewATNDeserializer(nil)
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
atn := staticData.atn
staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState))
decisionToDFA := staticData.decisionToDFA
for index, state := range atn.DecisionToState {
decisionToDFA[index] = antlr.NewDFA(state, index)
}
}
// CELLexerInit initializes any static state used to implement CELLexer. By default the
// static state used to implement the lexer is lazily initialized during the first call to
// NewCELLexer(). You can call this function if you wish to initialize the static state ahead
// of time.
func CELLexerInit() {
staticData := &cellexerLexerStaticData
staticData.once.Do(cellexerLexerInit)
}
// NewCELLexer produces a new lexer instance for the optional input antlr.CharStream.
func NewCELLexer(input antlr.CharStream) *CELLexer {
CELLexerInit()
l := new(CELLexer)
l.BaseLexer = antlr.NewBaseLexer(input)
staticData := &cellexerLexerStaticData
l.Interpreter = antlr.NewLexerATNSimulator(l, staticData.atn, staticData.decisionToDFA, staticData.predictionContextCache)
l.channelNames = staticData.channelNames
l.modeNames = staticData.modeNames
l.RuleNames = staticData.ruleNames
l.LiteralNames = staticData.literalNames
l.SymbolicNames = staticData.symbolicNames
l.GrammarFileName = "CEL.g4"
// TODO: l.EOF = antlr.TokenEOF
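The doc comments in this generated file describe two usage patterns: CELLexerInit may be called ahead of time to pay the one-time ATN deserialization cost at startup, and individual CELLexer instances, which are not thread-safe, amortize well inside a sync.Pool. A rough sketch of both, assuming the generated package is imported as gen alongside the antlr runtime; the helper names are illustrative only.

// Pool of reusable lexers; each one is rebound to a fresh input stream per use.
var lexerPool = &sync.Pool{
    New: func() interface{} {
        return gen.NewCELLexer(nil)
    },
}

func init() {
    // Optional: deserialize the lexer ATN eagerly instead of on the first NewCELLexer call.
    gen.CELLexerInit()
}

func lexSource(src string) *gen.CELLexer {
    lexer := lexerPool.Get().(*gen.CELLexer)
    lexer.SetInputStream(antlr.NewInputStream(src))
    return lexer // callers should lexerPool.Put(lexer) when finished
}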

View File

@@ -1,4 +1,4 @@
// Code generated from /Users/tswadell/go/src/github.com/google/cel-go/bin/../parser/gen/CEL.g4 by ANTLR 4.9.1. DO NOT EDIT.
// Code generated from /Users/tswadell/go/src/github.com/google/cel-go/parser/gen/CEL.g4 by ANTLR 4.10.1. DO NOT EDIT.
package gen // CEL
import "github.com/antlr/antlr4/runtime/Go/antlr"

File diff suppressed because it is too large

View File

@@ -1,4 +1,4 @@
// Code generated from /Users/tswadell/go/src/github.com/google/cel-go/bin/../parser/gen/CEL.g4 by ANTLR 4.9.1. DO NOT EDIT.
// Code generated from /Users/tswadell/go/src/github.com/google/cel-go/parser/gen/CEL.g4 by ANTLR 4.10.1. DO NOT EDIT.
package gen // CEL
import "github.com/antlr/antlr4/runtime/Go/antlr"

35 vendor/github.com/google/cel-go/parser/gen/generate.sh generated vendored Normal file
View File

@@ -0,0 +1,35 @@
#!/bin/bash -eu
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# To regenerate the CEL lexer/parser statically do the following:
# 1. Download the latest antlr tool from https://www.antlr.org/download.html
# 2. Copy the downloaded jar to the gen directory. It will have a name
# like antlr-<version>-complete.jar.
# 3. Modify the script below to refer to the current ANTLR version.
# 4. Execute the generation script from the gen directory.
# 5. Delete the jar and commit the regenerated sources.
#!/bin/sh
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Generate AntLR artifacts.
java -Xmx500M -cp ${DIR}/antlr-4.10.1-complete.jar org.antlr.v4.Tool \
-Dlanguage=Go \
-package gen \
-o ${DIR} \
-visitor ${DIR}/CEL.g4

View File

@@ -435,6 +435,11 @@ func (e *exprHelper) Ident(name string) *exprpb.Expr {
return e.parserHelper.newIdent(e.nextMacroID(), name)
}
// AccuIdent implements the ExprHelper interface method.
func (e *exprHelper) AccuIdent() *exprpb.Expr {
return e.parserHelper.newIdent(e.nextMacroID(), AccumulatorName)
}
// GlobalCall implements the ExprHelper interface method.
func (e *exprHelper) GlobalCall(function string, args ...*exprpb.Expr) *exprpb.Expr {
return e.parserHelper.newGlobalCall(e.nextMacroID(), function, args...)

View File

@@ -23,8 +23,6 @@ import (
exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
)
// TODO: Consider moving macros to common.
// NewGlobalMacro creates a Macro for a global function with the specified arg count.
func NewGlobalMacro(function string, argCount int, expander MacroExpander) Macro {
return &macro{
@@ -50,8 +48,7 @@ func NewGlobalVarArgMacro(function string, expander MacroExpander) Macro {
varArgStyle: true}
}
// NewReceiverVarArgMacro creates a Macro for a receiver function matching a variable arg
// count.
// NewReceiverVarArgMacro creates a Macro for a receiver function matching a variable arg count.
func NewReceiverVarArgMacro(function string, expander MacroExpander) Macro {
return &macro{
function: function,
@@ -135,9 +132,13 @@ func makeVarArgMacroKey(name string, receiverStyle bool) string {
return fmt.Sprintf("%s:*:%v", name, receiverStyle)
}
// MacroExpander converts the target and args of a function call that matches a Macro.
// MacroExpander converts a call and its associated arguments into a new CEL abstract syntax tree, or an error
// if the input arguments are not suitable for the expansion requirements for the macro in question.
//
// Note: when the Macros.IsReceiverStyle() is true, the target argument will be nil.
// The MacroExpander accepts as arguments a MacroExprHelper as well as the arguments used in the function call
// and produces as output an Expr ast node.
//
// Note: when the Macro.IsReceiverStyle() method returns true, the target argument will be nil.
type MacroExpander func(eh ExprHelper,
target *exprpb.Expr,
args []*exprpb.Expr) (*exprpb.Expr, *common.Error)
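A minimal sketch of a custom MacroExpander with this signature, written as if inside the parser package; the double() macro is hypothetical and not part of CEL or of this change.

// expandDouble rewrites the global call double(x) into the expression x * 2.
func expandDouble(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) {
    if len(args) != 1 {
        return nil, &common.Error{Message: "double() requires exactly one argument"}
    }
    // The ExprHelper mints new AST nodes with macro-scoped IDs.
    return eh.GlobalCall(operators.Multiply, args[0], eh.LiteralInt(2)), nil
}

// Pairing the expander with a Macro uses the constructors above, e.g.:
//   doubleMacro := NewGlobalMacro("double", 1, expandDouble)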
@@ -208,6 +209,9 @@ type ExprHelper interface {
// Ident creates an identifier Expr value.
Ident(name string) *exprpb.Expr
// AccuIdent returns an accumulator identifier for use with comprehension results.
AccuIdent() *exprpb.Expr
// GlobalCall creates a function call Expr value for a global (free) function.
GlobalCall(function string, args ...*exprpb.Expr) *exprpb.Expr
@@ -225,34 +229,44 @@ type ExprHelper interface {
}
var (
// HasMacro expands "has(m.f)" which tests the presence of a field, avoiding the need to
// specify the field as a string.
HasMacro = NewGlobalMacro(operators.Has, 1, MakeHas)
// AllMacro expands "range.all(var, predicate)" into a comprehension which ensures that all
// elements in the range satisfy the predicate.
AllMacro = NewReceiverMacro(operators.All, 2, MakeAll)
// ExistsMacro expands "range.exists(var, predicate)" into a comprehension which ensures that
// some element in the range satisfies the predicate.
ExistsMacro = NewReceiverMacro(operators.Exists, 2, MakeExists)
// ExistsOneMacro expands "range.exists_one(var, predicate)", which is true if for exactly one
// element in range the predicate holds.
ExistsOneMacro = NewReceiverMacro(operators.ExistsOne, 2, MakeExistsOne)
// MapMacro expands "range.map(var, function)" into a comprehension which applies the function
// to each element in the range to produce a new list.
MapMacro = NewReceiverMacro(operators.Map, 2, MakeMap)
// MapFilterMacro expands "range.map(var, predicate, function)" into a comprehension which
// first filters the elements in the range by the predicate, then applies the transform function
// to produce a new list.
MapFilterMacro = NewReceiverMacro(operators.Map, 3, MakeMap)
// FilterMacro expands "range.filter(var, predicate)" into a comprehension which filters
// elements in the range, producing a new list from the elements that satisfy the predicate.
FilterMacro = NewReceiverMacro(operators.Filter, 2, MakeFilter)
// AllMacros includes the list of all spec-supported macros.
AllMacros = []Macro{
// The macro "has(m.f)" which tests the presence of a field, avoiding the need to specify
// the field as a string.
NewGlobalMacro(operators.Has, 1, makeHas),
// The macro "range.all(var, predicate)", which is true if for all elements in range the
// predicate holds.
NewReceiverMacro(operators.All, 2, makeAll),
// The macro "range.exists(var, predicate)", which is true if for at least one element in
// range the predicate holds.
NewReceiverMacro(operators.Exists, 2, makeExists),
// The macro "range.exists_one(var, predicate)", which is true if for exactly one element
// in range the predicate holds.
NewReceiverMacro(operators.ExistsOne, 2, makeExistsOne),
// The macro "range.map(var, function)", applies the function to the vars in the range.
NewReceiverMacro(operators.Map, 2, makeMap),
// The macro "range.map(var, predicate, function)", applies the function to the vars in
// the range for which the predicate holds true. The other variables are filtered out.
NewReceiverMacro(operators.Map, 3, makeMap),
// The macro "range.filter(var, predicate)", filters out the variables for which the
// predicate is false.
NewReceiverMacro(operators.Filter, 2, makeFilter),
HasMacro,
AllMacro,
ExistsMacro,
ExistsOneMacro,
MapMacro,
MapFilterMacro,
FilterMacro,
}
// NoMacros list.
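The macros listed above are expanded at parse time, before type-checking. A short end-to-end sketch through the top-level cel package (an assumed import for this module, not shown in this diff), where the all() call is rewritten into the comprehension built by MakeAll:

func demoMacroExpansion() (ref.Val, error) {
    // The default environment enables the spec-supported macro set (AllMacros).
    env, err := cel.NewEnv()
    if err != nil {
        return nil, err
    }
    ast, iss := env.Compile(`[1, 2, 3].all(x, x > 0)`)
    if iss != nil && iss.Err() != nil {
        return nil, iss.Err()
    }
    prg, err := env.Program(ast)
    if err != nil {
        return nil, err
    }
    out, _, err := prg.Eval(cel.NoVars())
    return out, err // expected: types.Bool(true)
}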
@@ -270,62 +284,36 @@ const (
quantifierExistsOne
)
func makeAll(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) {
// MakeAll expands the input call arguments into a comprehension that returns true if all of the
// elements in the range match the predicate expressions:
// <iterRange>.all(<iterVar>, <predicate>)
func MakeAll(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) {
return makeQuantifier(quantifierAll, eh, target, args)
}
func makeExists(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) {
// MakeExists expands the input call arguments into a comprehension that returns true if any of the
// elements in the range match the predicate expressions:
// <iterRange>.exists(<iterVar>, <predicate>)
func MakeExists(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) {
return makeQuantifier(quantifierExists, eh, target, args)
}
func makeExistsOne(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) {
// MakeExistsOne expands the input call arguments into a comprehension that returns true if exactly
// one of the elements in the range match the predicate expressions:
// <iterRange>.exists_one(<iterVar>, <predicate>)
func MakeExistsOne(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) {
return makeQuantifier(quantifierExistsOne, eh, target, args)
}
func makeQuantifier(kind quantifierKind, eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) {
v, found := extractIdent(args[0])
if !found {
location := eh.OffsetLocation(args[0].GetId())
return nil, &common.Error{
Message: "argument must be a simple name",
Location: location}
}
accuIdent := func() *exprpb.Expr {
return eh.Ident(AccumulatorName)
}
var init *exprpb.Expr
var condition *exprpb.Expr
var step *exprpb.Expr
var result *exprpb.Expr
switch kind {
case quantifierAll:
init = eh.LiteralBool(true)
condition = eh.GlobalCall(operators.NotStrictlyFalse, accuIdent())
step = eh.GlobalCall(operators.LogicalAnd, accuIdent(), args[1])
result = accuIdent()
case quantifierExists:
init = eh.LiteralBool(false)
condition = eh.GlobalCall(
operators.NotStrictlyFalse,
eh.GlobalCall(operators.LogicalNot, accuIdent()))
step = eh.GlobalCall(operators.LogicalOr, accuIdent(), args[1])
result = accuIdent()
case quantifierExistsOne:
zeroExpr := eh.LiteralInt(0)
oneExpr := eh.LiteralInt(1)
init = zeroExpr
condition = eh.LiteralBool(true)
step = eh.GlobalCall(operators.Conditional, args[1],
eh.GlobalCall(operators.Add, accuIdent(), oneExpr), accuIdent())
result = eh.GlobalCall(operators.Equals, accuIdent(), oneExpr)
default:
return nil, &common.Error{Message: fmt.Sprintf("unrecognized quantifier '%v'", kind)}
}
return eh.Fold(v, target, AccumulatorName, init, condition, step, result), nil
}
func makeMap(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) {
// MakeMap expands the input call arguments into a comprehension that transforms each element in the
// input to produce an output list.
//
// There are two call patterns supported by map:
// <iterRange>.map(<iterVar>, <transform>)
// <iterRange>.map(<iterVar>, <predicate>, <transform>)
// In the second form, only the iterVar values which return true when provided to the predicate expression
// are transformed.
func MakeMap(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) {
v, found := extractIdent(args[0])
if !found {
return nil, &common.Error{Message: "argument is not an identifier"}
@@ -345,7 +333,6 @@ func makeMap(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.E
accuExpr := eh.Ident(AccumulatorName)
init := eh.NewList()
condition := eh.LiteralBool(true)
// TODO: use compiler internal method for faster, stateful add.
step := eh.GlobalCall(operators.Add, accuExpr, eh.NewList(fn))
if filter != nil {
@@ -354,7 +341,10 @@ func makeMap(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.E
return eh.Fold(v, target, AccumulatorName, init, condition, step, accuExpr), nil
}
func makeFilter(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) {
// MakeFilter expands the input call arguments into a comprehension which produces a list containing
// only the elements which match the provided predicate expression:
// <iterRange>.filter(<iterVar>, <predicate>)
func MakeFilter(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) {
v, found := extractIdent(args[0])
if !found {
return nil, &common.Error{Message: "argument is not an identifier"}
@@ -364,12 +354,60 @@ func makeFilter(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprp
accuExpr := eh.Ident(AccumulatorName)
init := eh.NewList()
condition := eh.LiteralBool(true)
// TODO: use compiler internal method for faster, stateful add.
step := eh.GlobalCall(operators.Add, accuExpr, eh.NewList(args[0]))
step = eh.GlobalCall(operators.Conditional, filter, step, accuExpr)
return eh.Fold(v, target, AccumulatorName, init, condition, step, accuExpr), nil
}
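
Both map and filter expand to a fold that starts from an empty list and conditionally appends each element. The sketch below mirrors that accumulation in plain Go; it illustrates the semantics of the generated comprehension rather than any cel-go API:

package main

import "fmt"

// foldFilter mimics the comprehension MakeFilter generates:
// init = [], condition = true, step = pred(x) ? accu + [x] : accu.
func foldFilter(items []int, pred func(int) bool) []int {
    accu := []int{} // init: empty list
    for _, x := range items {
        if pred(x) { // step guarded by the predicate
            accu = append(accu, x)
        }
    }
    return accu // result: the accumulator itself
}

// foldMap mimics MakeMap's two-argument form: step = accu + [transform(x)].
func foldMap(items []int, transform func(int) int) []int {
    accu := []int{}
    for _, x := range items {
        accu = append(accu, transform(x))
    }
    return accu
}

func main() {
    fmt.Println(foldFilter([]int{1, 2, 3, 4}, func(x int) bool { return x%2 == 0 })) // [2 4]
    fmt.Println(foldMap([]int{1, 2, 3}, func(x int) int { return x * x }))           // [1 4 9]
}
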
// MakeHas expands the input call arguments into a presence test, e.g. has(<operand>.field)
func MakeHas(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) {
if s, ok := args[0].ExprKind.(*exprpb.Expr_SelectExpr); ok {
return eh.PresenceTest(s.SelectExpr.GetOperand(), s.SelectExpr.GetField()), nil
}
return nil, &common.Error{Message: "invalid argument to has() macro"}
}
func makeQuantifier(kind quantifierKind, eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) {
v, found := extractIdent(args[0])
if !found {
location := eh.OffsetLocation(args[0].GetId())
return nil, &common.Error{
Message: "argument must be a simple name",
Location: location,
}
}
var init *exprpb.Expr
var condition *exprpb.Expr
var step *exprpb.Expr
var result *exprpb.Expr
switch kind {
case quantifierAll:
init = eh.LiteralBool(true)
condition = eh.GlobalCall(operators.NotStrictlyFalse, eh.AccuIdent())
step = eh.GlobalCall(operators.LogicalAnd, eh.AccuIdent(), args[1])
result = eh.AccuIdent()
case quantifierExists:
init = eh.LiteralBool(false)
condition = eh.GlobalCall(
operators.NotStrictlyFalse,
eh.GlobalCall(operators.LogicalNot, eh.AccuIdent()))
step = eh.GlobalCall(operators.LogicalOr, eh.AccuIdent(), args[1])
result = eh.AccuIdent()
case quantifierExistsOne:
zeroExpr := eh.LiteralInt(0)
oneExpr := eh.LiteralInt(1)
init = zeroExpr
condition = eh.LiteralBool(true)
step = eh.GlobalCall(operators.Conditional, args[1],
eh.GlobalCall(operators.Add, eh.AccuIdent(), oneExpr), eh.AccuIdent())
result = eh.GlobalCall(operators.Equals, eh.AccuIdent(), oneExpr)
default:
return nil, &common.Error{Message: fmt.Sprintf("unrecognized quantifier '%v'", kind)}
}
return eh.Fold(v, target, AccumulatorName, init, condition, step, result), nil
}
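
The init/condition/step/result choices above are easiest to read as an accumulator loop. A plain-Go illustration of the three quantifier expansions (semantics only, not the generated AST):

package main

import "fmt"

// all: init=true, step=accu && pred(x), result=accu.
func all(items []int, pred func(int) bool) bool {
    accu := true
    for _, x := range items {
        accu = accu && pred(x)
    }
    return accu
}

// exists: init=false, step=accu || pred(x), result=accu.
func exists(items []int, pred func(int) bool) bool {
    accu := false
    for _, x := range items {
        accu = accu || pred(x)
    }
    return accu
}

// existsOne: init=0, step=pred(x) ? accu+1 : accu, result=accu==1.
func existsOne(items []int, pred func(int) bool) bool {
    accu := 0
    for _, x := range items {
        if pred(x) {
            accu++
        }
    }
    return accu == 1
}

func main() {
    odd := func(x int) bool { return x%2 != 0 }
    fmt.Println(all([]int{1, 3, 5}, odd), exists([]int{2, 4, 5}, odd), existsOne([]int{2, 3, 4}, odd))
    // Output: true true true
}

The generated comprehension additionally wraps its condition in not_strictly_false so evaluation can stop early and tolerate errors and unknowns; the plain loops above skip that detail.
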
func extractIdent(e *exprpb.Expr) (string, bool) {
switch e.ExprKind.(type) {
case *exprpb.Expr_IdentExpr:
@@ -377,10 +415,3 @@ func extractIdent(e *exprpb.Expr) (string, bool) {
}
return "", false
}
func makeHas(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) {
if s, ok := args[0].ExprKind.(*exprpb.Expr_SelectExpr); ok {
return eh.PresenceTest(s.SelectExpr.GetOperand(), s.SelectExpr.GetField()), nil
}
return nil, &common.Error{Message: "invalid argument to has() macro"}
}

View File

@@ -46,7 +46,7 @@ func NewParser(opts ...Option) (*Parser, error) {
}
}
if p.maxRecursionDepth == 0 {
p.maxRecursionDepth = 200
p.maxRecursionDepth = 250
}
if p.maxRecursionDepth == -1 {
p.maxRecursionDepth = int((^uint(0)) >> 1)
@@ -270,6 +270,7 @@ type parser struct {
errors *parseErrors
helper *parserHelper
macros map[string]Macro
recursionDepth int
maxRecursionDepth int
errorRecoveryLimit int
errorRecoveryLookaheadTokenLimit int
@@ -352,6 +353,13 @@ func (p *parser) parse(expr runes.Buffer, desc string) *exprpb.Expr {
// Visitor implementations.
func (p *parser) Visit(tree antlr.ParseTree) interface{} {
p.recursionDepth++
if p.recursionDepth > p.maxRecursionDepth {
panic(&recursionError{message: "max recursion depth exceeded"})
}
defer func() {
p.recursionDepth--
}()
switch tree.(type) {
case *gen.StartContext:
return p.VisitStart(tree.(*gen.StartContext))
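
The parser change above raises the default maxRecursionDepth from 200 to 250 and guards every Visit call with a counter that panics once the limit is exceeded. A standalone sketch of that guard pattern, assuming a recover handler higher up converts the panic into an ordinary error, as the recursionError panic value suggests the cel-go parser does:

package main

import (
    "errors"
    "fmt"
)

type node struct{ children []*node }

type recursionError struct{ message string }

func (e *recursionError) Error() string { return e.message }

type visitor struct {
    depth, maxDepth int
}

func (v *visitor) visit(n *node) int {
    v.depth++
    if v.depth > v.maxDepth {
        panic(&recursionError{message: "max recursion depth exceeded"})
    }
    defer func() { v.depth-- }()

    count := 1
    for _, c := range n.children {
        count += v.visit(c)
    }
    return count
}

// parse wraps visit and converts the panic back into an ordinary error.
func parse(root *node, maxDepth int) (n int, err error) {
    defer func() {
        if r := recover(); r != nil {
            if re, ok := r.(*recursionError); ok {
                err = errors.New(re.message)
                return
            }
            panic(r)
        }
    }()
    return (&visitor{maxDepth: maxDepth}).visit(root), nil
}

func main() {
    // Build a chain ten nodes deep, then parse with a limit of 5 and of 50.
    root := &node{}
    cur := root
    for i := 0; i < 9; i++ {
        next := &node{}
        cur.children = append(cur.children, next)
        cur = next
    }
    fmt.Println(parse(root, 5))  // 0 max recursion depth exceeded
    fmt.Println(parse(root, 50)) // 10 <nil>
}
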

View File

@@ -36,9 +36,29 @@ import (
// - Floating point values are converted to the small number of digits needed to represent the value.
// - Spacing around punctuation marks may be lost.
// - Parentheses will only be applied when they affect operator precedence.
func Unparse(expr *exprpb.Expr, info *exprpb.SourceInfo) (string, error) {
un := &unparser{info: info}
err := un.visit(expr)
//
// This function optionally takes one or more UnparserOption values to alter the unparsing behavior, such as
// performing word wrapping on expressions.
func Unparse(expr *exprpb.Expr, info *exprpb.SourceInfo, opts ...UnparserOption) (string, error) {
unparserOpts := &unparserOption{
wrapOnColumn: defaultWrapOnColumn,
wrapAfterColumnLimit: defaultWrapAfterColumnLimit,
operatorsToWrapOn: defaultOperatorsToWrapOn,
}
var err error
for _, opt := range opts {
unparserOpts, err = opt(unparserOpts)
if err != nil {
return "", err
}
}
un := &unparser{
info: info,
options: unparserOpts,
}
err = un.visit(expr)
if err != nil {
return "", err
}
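
Unparse now follows the functional-options pattern: each UnparserOption receives the current settings and returns the updated settings or an error, and the loop above simply threads the value through every option. A generic sketch of the same pattern using hypothetical names (settings, Option, WithColumn, render), not the cel-go API itself:

package main

import "fmt"

// settings stands in for the internal unparserOption struct.
type settings struct {
    wrapOnColumn int
    wrapAfter    bool
}

// Option mirrors the shape of UnparserOption.
type Option func(*settings) (*settings, error)

func WithColumn(col int) Option {
    return func(s *settings) (*settings, error) {
        if col < 1 {
            return nil, fmt.Errorf("column must be >= 1, got %d", col)
        }
        s.wrapOnColumn = col
        return s, nil
    }
}

func render(opts ...Option) (string, error) {
    s := &settings{wrapOnColumn: 80, wrapAfter: true} // defaults
    var err error
    for _, opt := range opts {
        if s, err = opt(s); err != nil {
            return "", err
        }
    }
    return fmt.Sprintf("wrap at %d, after=%v", s.wrapOnColumn, s.wrapAfter), nil
}

func main() {
    fmt.Println(render(WithColumn(40)))
}
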
@@ -47,8 +67,10 @@ func Unparse(expr *exprpb.Expr, info *exprpb.SourceInfo) (string, error) {
// unparser visits an expression to reconstruct a human-readable string from an AST.
type unparser struct {
str strings.Builder
info *exprpb.SourceInfo
str strings.Builder
info *exprpb.SourceInfo
options *unparserOption
lastWrappedIndex int
}
func (un *unparser) visit(expr *exprpb.Expr) error {
@@ -135,9 +157,8 @@ func (un *unparser) visitCallBinary(expr *exprpb.Expr) error {
if !found {
return fmt.Errorf("cannot unmangle operator: %s", fun)
}
un.str.WriteString(" ")
un.str.WriteString(unmangled)
un.str.WriteString(" ")
un.writeOperatorWithWrapping(fun, unmangled)
return un.visitMaybeNested(rhs, rhsParen)
}
@@ -151,7 +172,8 @@ func (un *unparser) visitCallConditional(expr *exprpb.Expr) error {
if err != nil {
return err
}
un.str.WriteString(" ? ")
un.writeOperatorWithWrapping(operators.Conditional, "?")
// add parens if operand is a conditional itself.
nested = isSamePrecedence(operators.Conditional, args[1]) ||
isComplexOperator(args[1])
@@ -159,6 +181,7 @@ func (un *unparser) visitCallConditional(expr *exprpb.Expr) error {
if err != nil {
return err
}
un.str.WriteString(" : ")
// add parens if operand is a conditional itself.
nested = isSamePrecedence(operators.Conditional, args[2]) ||
@@ -444,3 +467,130 @@ func bytesToOctets(byteVal []byte) string {
}
return b.String()
}
// writeOperatorWithWrapping outputs the operator and inserts a newline for operators configured
// in the unparser options.
func (un *unparser) writeOperatorWithWrapping(fun string, unmangled string) bool {
_, wrapOperatorExists := un.options.operatorsToWrapOn[fun]
lineLength := un.str.Len() - un.lastWrappedIndex + len(fun)
if wrapOperatorExists && lineLength >= un.options.wrapOnColumn {
un.lastWrappedIndex = un.str.Len()
// wrapAfterColumnLimit flag dictates whether the newline is placed
// before or after the operator
if un.options.wrapAfterColumnLimit {
// Input: a && b
// Output: a &&\nb
un.str.WriteString(" ")
un.str.WriteString(unmangled)
un.str.WriteString("\n")
} else {
// Input: a && b
// Output: a\n&& b
un.str.WriteString("\n")
un.str.WriteString(unmangled)
un.str.WriteString(" ")
}
return true
} else {
un.str.WriteString(" ")
un.str.WriteString(unmangled)
un.str.WriteString(" ")
}
return false
}
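
writeOperatorWithWrapping decides whether to break the line by comparing the builder's length against the index recorded at the last wrap. A small standalone sketch of the same bookkeeping, applied to plain words instead of CEL operators:

package main

import (
    "fmt"
    "strings"
)

type wrapper struct {
    str              strings.Builder
    lastWrappedIndex int
    wrapOnColumn     int
}

// write appends a word, inserting a newline once the current line
// (everything since the last wrap) would exceed the column limit.
func (w *wrapper) write(word string) {
    lineLength := w.str.Len() - w.lastWrappedIndex + len(word)
    if w.str.Len() > 0 {
        if lineLength >= w.wrapOnColumn {
            w.str.WriteString("\n")
            w.lastWrappedIndex = w.str.Len()
        } else {
            w.str.WriteString(" ")
        }
    }
    w.str.WriteString(word)
}

func main() {
    w := &wrapper{wrapOnColumn: 16}
    for _, word := range strings.Fields("alpha && beta && gamma && delta") {
        w.write(word)
    }
    fmt.Println(w.str.String())
    // alpha && beta &&
    // gamma && delta
}
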
// Default values for the unparser options.
var (
defaultWrapOnColumn = 80
defaultWrapAfterColumnLimit = true
defaultOperatorsToWrapOn = map[string]bool{
operators.LogicalAnd: true,
operators.LogicalOr: true,
}
)
// UnparserOption is a functional option for configuring the output formatting
// of the Unparse function.
type UnparserOption func(*unparserOption) (*unparserOption, error)
// Internal representation of the UnparserOption type
type unparserOption struct {
wrapOnColumn int
operatorsToWrapOn map[string]bool
wrapAfterColumnLimit bool
}
// WrapOnColumn wraps the output expression when its string length exceeds the specified column limit
// for the operators set by the WrapOnOperators function; by default, "&&" and "||" are wrapped.
//
// Example usage:
//
// Unparse(expr, sourceInfo, WrapOnColumn(40), WrapOnOperators(Operators.LogicalAnd))
//
// This will insert a newline immediately after the logical AND operator for the below example input:
//
// Input:
// 'my-principal-group' in request.auth.claims && request.auth.claims.iat > now - duration('5m')
//
// Output:
// 'my-principal-group' in request.auth.claims &&
// request.auth.claims.iat > now - duration('5m')
func WrapOnColumn(col int) UnparserOption {
return func(opt *unparserOption) (*unparserOption, error) {
if col < 1 {
return nil, fmt.Errorf("Invalid unparser option. Wrap column value must be greater than or equal to 1. Got %v instead", col)
}
opt.wrapOnColumn = col
return opt, nil
}
}
// WrapOnOperators specifies the operators on which to perform word wrapping when the output expression's
// string length exceeds the column limit set by the WrapOnColumn function.
//
// Word wrapping is supported on non-unary symbolic operators. Refer to operators.go for the full list.
//
// This will replace any previously supplied operators instead of merging them.
func WrapOnOperators(symbols ...string) UnparserOption {
return func(opt *unparserOption) (*unparserOption, error) {
opt.operatorsToWrapOn = make(map[string]bool)
for _, symbol := range symbols {
_, found := operators.FindReverse(symbol)
if !found {
return nil, fmt.Errorf("Invalid unparser option. Unsupported operator: %s", symbol)
}
arity := operators.Arity(symbol)
if arity < 2 {
return nil, fmt.Errorf("Invalid unparser option. Unary operators are unsupported: %s", symbol)
}
opt.operatorsToWrapOn[symbol] = true
}
return opt, nil
}
}
// WrapAfterColumnLimit dictates whether to insert a newline before or after the specified operator
// when word wrapping is performed.
//
// Example usage:
//
// Unparse(expr, sourceInfo, WrapOnColumn(40), WrapOnOperators(Operators.LogicalAnd), WrapAfterColumnLimit(false))
//
// This will insert a newline immediately before the logical AND operator for the below example input, ensuring
// that the length of a line never exceeds the specified column limit:
//
// Input:
// 'my-principal-group' in request.auth.claims && request.auth.claims.iat > now - duration('5m')
//
// Output:
// 'my-principal-group' in request.auth.claims
// && request.auth.claims.iat > now - duration('5m')
func WrapAfterColumnLimit(wrapAfter bool) UnparserOption {
return func(opt *unparserOption) (*unparserOption, error) {
opt.wrapAfterColumnLimit = wrapAfter
return opt, nil
}
}
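
Putting the three options together, callers can request wrapped output directly from Unparse. A usage sketch, assuming the cel-go v0.12 cel and parser packages (cel.NewEnv, Env.Parse, and cel.AstToParsedExpr) behave as documented; the expression is the one from the WrapOnColumn example above:

package main

import (
    "fmt"
    "log"

    "github.com/google/cel-go/cel"
    "github.com/google/cel-go/common/operators"
    "github.com/google/cel-go/parser"
)

func main() {
    env, err := cel.NewEnv()
    if err != nil {
        log.Fatal(err)
    }
    // Parse only (no type-check), so the free variables need no declarations.
    ast, iss := env.Parse(`'my-principal-group' in request.auth.claims && request.auth.claims.iat > now - duration('5m')`)
    if iss != nil && iss.Err() != nil {
        log.Fatal(iss.Err())
    }
    pe, err := cel.AstToParsedExpr(ast)
    if err != nil {
        log.Fatal(err)
    }
    out, err := parser.Unparse(pe.GetExpr(), pe.GetSourceInfo(),
        parser.WrapOnColumn(40),
        parser.WrapOnOperators(operators.LogicalAnd),
        parser.WrapAfterColumnLimit(false))
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(out)
    // Expected, per the documentation above:
    // 'my-principal-group' in request.auth.claims
    // && request.auth.claims.iat > now - duration('5m')
}
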

View File

@@ -21,10 +21,11 @@ import (
type Number int32
const (
MinValidNumber Number = 1
FirstReservedNumber Number = 19000
LastReservedNumber Number = 19999
MaxValidNumber Number = 1<<29 - 1
MinValidNumber Number = 1
FirstReservedNumber Number = 19000
LastReservedNumber Number = 19999
MaxValidNumber Number = 1<<29 - 1
DefaultRecursionLimit = 10000
)
// IsValid reports whether the field number is semantically valid.
@@ -55,6 +56,7 @@ const (
errCodeOverflow
errCodeReserved
errCodeEndGroup
errCodeRecursionDepth
)
var (
@@ -112,6 +114,10 @@ func ConsumeField(b []byte) (Number, Type, int) {
// When parsing a group, the length includes the end group marker and
// the end group is verified to match the starting field number.
func ConsumeFieldValue(num Number, typ Type, b []byte) (n int) {
return consumeFieldValueD(num, typ, b, DefaultRecursionLimit)
}
func consumeFieldValueD(num Number, typ Type, b []byte, depth int) (n int) {
switch typ {
case VarintType:
_, n = ConsumeVarint(b)
@@ -126,6 +132,9 @@ func ConsumeFieldValue(num Number, typ Type, b []byte) (n int) {
_, n = ConsumeBytes(b)
return n
case StartGroupType:
if depth < 0 {
return errCodeRecursionDepth
}
n0 := len(b)
for {
num2, typ2, n := ConsumeTag(b)
@@ -140,7 +149,7 @@ func ConsumeFieldValue(num Number, typ Type, b []byte) (n int) {
return n0 - len(b)
}
n = ConsumeFieldValue(num2, typ2, b)
n = consumeFieldValueD(num2, typ2, b, depth-1)
if n < 0 {
return n // forward error code
}
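
The protowire change threads a recursion budget through group parsing so a maliciously nested message cannot exhaust the stack; once the budget is spent, the negative errCodeRecursionDepth sentinel propagates like any other parse error. A hedged sketch that builds nested-group payloads with protowire's Append helpers and relies on the package's documented negative-length error convention:

package main

import (
    "fmt"

    "google.golang.org/protobuf/encoding/protowire"
)

// nestedGroups encodes `depth` nested groups on field number 1:
// SGROUP SGROUP ... EGROUP EGROUP.
func nestedGroups(depth int) []byte {
    var b []byte
    for i := 0; i < depth; i++ {
        b = protowire.AppendTag(b, 1, protowire.StartGroupType)
    }
    for i := 0; i < depth; i++ {
        b = protowire.AppendTag(b, 1, protowire.EndGroupType)
    }
    return b
}

func main() {
    shallow := nestedGroups(5)
    deep := nestedGroups(20000) // deeper than protowire.DefaultRecursionLimit (10000)

    num, typ, n := protowire.ConsumeTag(shallow)
    fmt.Println(protowire.ConsumeFieldValue(num, typ, shallow[n:]) > 0) // true: parsed fine

    num, typ, n = protowire.ConsumeTag(deep)
    fmt.Println(protowire.ConsumeFieldValue(num, typ, deep[n:]) < 0) // true: recursion error
}
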

View File

@@ -381,7 +381,7 @@ func (d *Decoder) currentOpenKind() (Kind, byte) {
case '[':
return ListOpen, ']'
}
panic(fmt.Sprintf("Decoder: openStack contains invalid byte %s", string(openCh)))
panic(fmt.Sprintf("Decoder: openStack contains invalid byte %c", openCh))
}
func (d *Decoder) pushOpenStack(ch byte) {

View File

@@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !go1.13
// +build !go1.13
package errors

View File

@@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build go1.13
// +build go1.13
package errors

View File

@@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !protolegacy
// +build !protolegacy
package flags

View File

@@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build protolegacy
// +build protolegacy
package flags

View File

@@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !go1.12
// +build !go1.12
package impl

View File

@@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build go1.12
// +build go1.12
package impl

View File

@@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build purego || appengine
// +build purego appengine
package impl

View File

@@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !purego && !appengine
// +build !purego,!appengine
package impl

View File

@@ -18,6 +18,7 @@ import (
)
var errDecode = errors.New("cannot parse invalid wire-format data")
var errRecursionDepth = errors.New("exceeded maximum recursion depth")
type unmarshalOptions struct {
flags protoiface.UnmarshalInputFlags
@@ -25,6 +26,7 @@ type unmarshalOptions struct {
FindExtensionByName(field protoreflect.FullName) (protoreflect.ExtensionType, error)
FindExtensionByNumber(message protoreflect.FullName, field protoreflect.FieldNumber) (protoreflect.ExtensionType, error)
}
depth int
}
func (o unmarshalOptions) Options() proto.UnmarshalOptions {
@@ -44,6 +46,7 @@ func (o unmarshalOptions) IsDefault() bool {
var lazyUnmarshalOptions = unmarshalOptions{
resolver: preg.GlobalTypes,
depth: protowire.DefaultRecursionLimit,
}
type unmarshalOutput struct {
@@ -62,6 +65,7 @@ func (mi *MessageInfo) unmarshal(in piface.UnmarshalInput) (piface.UnmarshalOutp
out, err := mi.unmarshalPointer(in.Buf, p, 0, unmarshalOptions{
flags: in.Flags,
resolver: in.Resolver,
depth: in.Depth,
})
var flags piface.UnmarshalOutputFlags
if out.initialized {
@@ -82,6 +86,10 @@ var errUnknown = errors.New("unknown")
func (mi *MessageInfo) unmarshalPointer(b []byte, p pointer, groupTag protowire.Number, opts unmarshalOptions) (out unmarshalOutput, err error) {
mi.init()
opts.depth--
if opts.depth < 0 {
return out, errRecursionDepth
}
if flags.ProtoLegacy && mi.isMessageSet {
return unmarshalMessageSet(mi, b, p, opts)
}

View File

@@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build purego || appengine
// +build purego appengine
package impl

View File

@@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !purego && !appengine
// +build !purego,!appengine
package impl

View File

@@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build purego || appengine
// +build purego appengine
package strs

View File

@@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !purego && !appengine
// +build !purego,!appengine
package strs

View File

@@ -52,8 +52,8 @@ import (
// 10. Send out the CL for review and submit it.
const (
Major = 1
Minor = 27
Patch = 1
Minor = 28
Patch = 0
PreRelease = ""
)

View File

@@ -42,18 +42,25 @@ type UnmarshalOptions struct {
FindExtensionByName(field protoreflect.FullName) (protoreflect.ExtensionType, error)
FindExtensionByNumber(message protoreflect.FullName, field protoreflect.FieldNumber) (protoreflect.ExtensionType, error)
}
// RecursionLimit limits how deeply messages may be nested.
// If zero, a default limit is applied.
RecursionLimit int
}
// Unmarshal parses the wire-format message in b and places the result in m.
// The provided message must be mutable (e.g., a non-nil pointer to a message).
func Unmarshal(b []byte, m Message) error {
_, err := UnmarshalOptions{}.unmarshal(b, m.ProtoReflect())
_, err := UnmarshalOptions{RecursionLimit: protowire.DefaultRecursionLimit}.unmarshal(b, m.ProtoReflect())
return err
}
// Unmarshal parses the wire-format message in b and places the result in m.
// The provided message must be mutable (e.g., a non-nil pointer to a message).
func (o UnmarshalOptions) Unmarshal(b []byte, m Message) error {
if o.RecursionLimit == 0 {
o.RecursionLimit = protowire.DefaultRecursionLimit
}
_, err := o.unmarshal(b, m.ProtoReflect())
return err
}
@@ -63,6 +70,9 @@ func (o UnmarshalOptions) Unmarshal(b []byte, m Message) error {
// This method permits fine-grained control over the unmarshaler.
// Most users should use Unmarshal instead.
func (o UnmarshalOptions) UnmarshalState(in protoiface.UnmarshalInput) (protoiface.UnmarshalOutput, error) {
if o.RecursionLimit == 0 {
o.RecursionLimit = protowire.DefaultRecursionLimit
}
return o.unmarshal(in.Buf, in.Message)
}
@@ -86,12 +96,17 @@ func (o UnmarshalOptions) unmarshal(b []byte, m protoreflect.Message) (out proto
Message: m,
Buf: b,
Resolver: o.Resolver,
Depth: o.RecursionLimit,
}
if o.DiscardUnknown {
in.Flags |= protoiface.UnmarshalDiscardUnknown
}
out, err = methods.Unmarshal(in)
} else {
o.RecursionLimit--
if o.RecursionLimit < 0 {
return out, errors.New("exceeded max recursion depth")
}
err = o.unmarshalMessageSlow(b, m)
}
if err != nil {
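
With the new field, callers can pick a tighter or looser bound than protowire.DefaultRecursionLimit per call; a zero value keeps the default. A usage sketch with google.protobuf.Value as an arbitrary, well-known message type:

package main

import (
    "fmt"
    "log"

    "google.golang.org/protobuf/proto"
    "google.golang.org/protobuf/types/known/structpb"
)

func main() {
    // Build a modestly nested google.protobuf.Value and round-trip it.
    v, err := structpb.NewValue(map[string]interface{}{
        "a": map[string]interface{}{"b": map[string]interface{}{"c": 1}},
    })
    if err != nil {
        log.Fatal(err)
    }
    wire, err := proto.Marshal(v)
    if err != nil {
        log.Fatal(err)
    }

    // The default limit applies when RecursionLimit is left at zero.
    var out structpb.Value
    if err := proto.Unmarshal(wire, &out); err != nil {
        log.Fatal(err)
    }

    // An explicit, much smaller budget; deeply nested inputs would now fail fast.
    err = proto.UnmarshalOptions{RecursionLimit: 100}.Unmarshal(wire, &out)
    fmt.Println(err) // <nil> for this shallow message
}
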

View File

@@ -3,6 +3,7 @@
// license that can be found in the LICENSE file.
// The protoreflect build tag disables use of fast-path methods.
//go:build !protoreflect
// +build !protoreflect
package proto

View File

@@ -3,6 +3,7 @@
// license that can be found in the LICENSE file.
// The protoreflect build tag disables use of fast-path methods.
//go:build protoreflect
// +build protoreflect
package proto

View File

@@ -53,6 +53,7 @@ type (
FindExtensionByName(field FullName) (ExtensionType, error)
FindExtensionByNumber(message FullName, field FieldNumber) (ExtensionType, error)
}
Depth int
}
unmarshalOutput = struct {
pragma.NoUnkeyedLiterals

View File

@@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build purego || appengine
// +build purego appengine
package protoreflect

View File

@@ -41,6 +41,31 @@ import (
// Converting to/from a Value and a concrete Go value panics on type mismatch.
// For example, ValueOf("hello").Int() panics because this attempts to
// retrieve an int64 from a string.
//
// List, Map, and Message Values are called "composite" values.
//
// A composite Value may alias (reference) memory at some location,
// such that changes to the Value update that location.
// A composite value acquired with a Mutable method, such as Message.Mutable,
// always references the source object.
//
// For example:
// // Append a 0 to a "repeated int32" field.
// // Since the Value returned by Mutable is guaranteed to alias
// // the source message, modifying the Value modifies the message.
// message.Mutable(fieldDesc).(List).Append(protoreflect.ValueOfInt32(0))
//
// // Assign [0] to a "repeated int32" field by creating a new Value,
// // modifying it, and assigning it.
// list := message.NewField(fieldDesc).(List)
// list.Append(protoreflect.ValueOfInt32(0))
// message.Set(fieldDesc, list)
// // ERROR: Since it is not defined whether Set aliases the source,
// // appending to the List here may or may not modify the message.
// list.Append(protoreflect.ValueOfInt32(0))
//
// Some operations, such as Message.Get, may return an "empty, read-only"
// composite Value. Modifying an empty, read-only value panics.
type Value value
// The protoreflect API uses a custom Value union type instead of interface{}

View File

@@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build !purego && !appengine
// +build !purego,!appengine
package protoreflect

View File

@@ -103,6 +103,7 @@ type UnmarshalInput = struct {
FindExtensionByName(field protoreflect.FullName) (protoreflect.ExtensionType, error)
FindExtensionByNumber(message protoreflect.FullName, field protoreflect.FieldNumber) (protoreflect.ExtensionType, error)
}
Depth int
}
// UnmarshalOutput is output from the Unmarshal method.

18
vendor/modules.txt vendored
View File

@@ -99,7 +99,7 @@ github.com/PuerkitoBio/purell
# github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 => github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578
## explicit
github.com/PuerkitoBio/urlesc
# github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20220209173558-ad29539cd2e9 => github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20220209173558-ad29539cd2e9
# github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20220418222510-f25a4f6275ed => github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20220418222510-f25a4f6275ed
## explicit; go 1.16
github.com/antlr/antlr4/runtime/Go/antlr
# github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e => github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e
@@ -416,7 +416,7 @@ github.com/google/cadvisor/utils/sysfs
github.com/google/cadvisor/utils/sysinfo
github.com/google/cadvisor/version
github.com/google/cadvisor/watcher
# github.com/google/cel-go v0.11.2 => github.com/google/cel-go v0.11.2
# github.com/google/cel-go v0.12.3 => github.com/google/cel-go v0.12.3
## explicit; go 1.17
github.com/google/cel-go/cel
github.com/google/cel-go/checker
@@ -1236,7 +1236,7 @@ google.golang.org/appengine/internal/modules
google.golang.org/appengine/internal/remote_api
google.golang.org/appengine/internal/urlfetch
google.golang.org/appengine/urlfetch
# google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6 => google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6
# google.golang.org/genproto v0.0.0-20220502173005-c8bf987b8c21 => google.golang.org/genproto v0.0.0-20220502173005-c8bf987b8c21
## explicit; go 1.15
google.golang.org/genproto/googleapis/api/annotations
google.golang.org/genproto/googleapis/api/expr/v1alpha1
@@ -1299,8 +1299,8 @@ google.golang.org/grpc/serviceconfig
google.golang.org/grpc/stats
google.golang.org/grpc/status
google.golang.org/grpc/tap
# google.golang.org/protobuf v1.27.1 => google.golang.org/protobuf v1.27.1
## explicit; go 1.9
# google.golang.org/protobuf v1.28.0 => google.golang.org/protobuf v1.28.0
## explicit; go 1.11
google.golang.org/protobuf/encoding/protojson
google.golang.org/protobuf/encoding/prototext
google.golang.org/protobuf/encoding/protowire
@@ -2580,7 +2580,7 @@ sigs.k8s.io/yaml
# github.com/PuerkitoBio/urlesc => github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578
# github.com/ajstarks/svgo => github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af
# github.com/antihax/optional => github.com/antihax/optional v1.0.0
# github.com/antlr/antlr4/runtime/Go/antlr => github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20220209173558-ad29539cd2e9
# github.com/antlr/antlr4/runtime/Go/antlr => github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20220418222510-f25a4f6275ed
# github.com/armon/circbuf => github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e
# github.com/armon/go-socks5 => github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5
# github.com/asaskevich/govalidator => github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a
@@ -2678,7 +2678,7 @@ sigs.k8s.io/yaml
# github.com/golangplus/testing => github.com/golangplus/testing v1.0.0
# github.com/google/btree => github.com/google/btree v1.0.1
# github.com/google/cadvisor => github.com/google/cadvisor v0.44.1
# github.com/google/cel-go => github.com/google/cel-go v0.11.2
# github.com/google/cel-go => github.com/google/cel-go v0.12.3
# github.com/google/gnostic => github.com/google/gnostic v0.5.7-v3refs
# github.com/google/go-cmp => github.com/google/go-cmp v0.5.6
# github.com/google/gofuzz => github.com/google/gofuzz v1.1.0
@@ -2838,10 +2838,10 @@ sigs.k8s.io/yaml
# gonum.org/v1/plot => gonum.org/v1/plot v0.0.0-20190515093506-e2840ee46a6b
# google.golang.org/api => google.golang.org/api v0.60.0
# google.golang.org/appengine => google.golang.org/appengine v1.6.7
# google.golang.org/genproto => google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6
# google.golang.org/genproto => google.golang.org/genproto v0.0.0-20220502173005-c8bf987b8c21
# google.golang.org/grpc => google.golang.org/grpc v1.47.0
# google.golang.org/grpc/cmd/protoc-gen-go-grpc => google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0
# google.golang.org/protobuf => google.golang.org/protobuf v1.27.1
# google.golang.org/protobuf => google.golang.org/protobuf v1.28.0
# gopkg.in/alecthomas/kingpin.v2 => gopkg.in/alecthomas/kingpin.v2 v2.2.6
# gopkg.in/check.v1 => gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f
# gopkg.in/errgo.v2 => gopkg.in/errgo.v2 v2.1.0