ast/index: collect rule head values at build time
Signed-off-by: Stephan Renatus <[email protected]>
srenatus committed Oct 19, 2021
1 parent d8157af commit 51e1cbc
Showing 1 changed file with 31 additions and 43 deletions.
74 changes: 31 additions & 43 deletions ast/index.go
@@ -104,13 +104,11 @@ func (i *baseDocEqIndex) Build(rules []*Rule) bool {
// Insert rule into trie with (insertion order, priority order)
// tuple. Retaining the insertion order allows us to return rules
// in the order they were passed to this function.
node.rules = append(node.rules, &ruleNode{[...]int{idx, prio}, rule})
node.append([...]int{idx, prio}, rule)
prio++
return false
})

}

return true
}

@@ -126,27 +124,24 @@ func (i *baseDocEqIndex) Lookup(resolver ValueResolver) (*IndexResult, error) {
result := NewIndexResult(i.kind)
result.Default = i.defaultRule
result.Rules = make([]*Rule, 0, len(tr.ordering))
ee := newEarlyExit(i.kind)

for _, pos := range tr.ordering {
sort.Slice(tr.unordered[pos], func(i, j int) bool {
return tr.unordered[pos][i].prio[1] < tr.unordered[pos][j].prio[1]
})
nodes := tr.unordered[pos]
root := nodes[0].rule
ee.add(root.Head.Value)

result.Rules = append(result.Rules, root)
if len(nodes) > 1 {
result.Else[root] = make([]*Rule, len(nodes)-1)
for i := 1; i < len(nodes); i++ {
result.Else[root][i-1] = nodes[i].rule
ee.add(nodes[i].rule.Head.Value)
}
}
}
if len(result.Rules) > 1 {
result.EarlyExit = ee.possible()
result.EarlyExit = tr.values.Len() == 1 && tr.values.Slice()[0].IsGround()
}
return result, nil
}
@@ -186,9 +181,7 @@ type ruleWalker struct {

func (r *ruleWalker) Do(x interface{}) trieWalker {
tn := x.(*trieNode)
for _, rn := range tn.rules {
r.result.Add(rn)
}
r.result.Add(tn)
return r
}

@@ -393,25 +386,33 @@ type trieWalker interface {
type trieTraversalResult struct {
unordered map[int][]*ruleNode
ordering []int
values Set
}

func newTrieTraversalResult() *trieTraversalResult {
return &trieTraversalResult{
unordered: map[int][]*ruleNode{},
values: NewSet(),
}
}

func (tr *trieTraversalResult) Add(node *ruleNode) {
root := node.prio[0]
nodes, ok := tr.unordered[root]
if !ok {
tr.ordering = append(tr.ordering, root)
func (tr *trieTraversalResult) Add(t *trieNode) {
for _, node := range t.rules {
root := node.prio[0]
nodes, ok := tr.unordered[root]
if !ok {
tr.ordering = append(tr.ordering, root)
}
tr.unordered[root] = append(nodes, node)
}
if t.values != nil {
t.values.Foreach(func(v *Term) { tr.values.Add(v) })
}
tr.unordered[root] = append(nodes, node)
}

type trieNode struct {
ref Ref
values Set
mappers []*valueMapper
next *trieNode
any *trieNode
@@ -456,6 +457,19 @@ func (node *trieNode) String() string {
return strings.Join(flags, " ")
}

func (node *trieNode) append(prio [2]int, rule *Rule) {
node.rules = append(node.rules, &ruleNode{prio, rule})

if node.values != nil {
node.values.Add(rule.Head.Value)
return
}

if node.values == nil && rule.Head.DocKind() == CompleteDoc {
node.values = NewSet(rule.Head.Value)
}
}

type ruleNode struct {
prio [2]int
rule *Rule
@@ -509,9 +523,7 @@ func (node *trieNode) Traverse(resolver ValueResolver, tr *trieTraversalResult)
return nil
}

for i := range node.rules {
tr.Add(node.rules[i])
}
tr.Add(node)

return node.next.traverse(resolver, tr)
}
@@ -854,27 +866,3 @@ func stringSliceToArray(s []string) *Array {
}
return NewArray(arr...)
}

type earlyExit struct {
values Set
}

func newEarlyExit(kind DocKind) earlyExit {
switch kind {
case CompleteDoc:
return earlyExit{values: NewSet()}
}
return earlyExit{}
}
func (e earlyExit) add(t *Term) {
if e.values != nil {
e.values.Add(t)
}
}

func (e earlyExit) possible() bool {
if e.values != nil {
return e.values.Len() == 1 && e.values.Slice()[0].IsGround()
}
return false
}
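
A minimal, self-contained sketch of the idea behind this commit, under the following assumptions: the types below (term, indexNode, traversal) are hypothetical stand-ins for *ast.Term, trieNode, and trieTraversalResult, not the real ast package API. It shows complete-document rule head values being recorded in a per-node set while the index is built, so that lookup only unions the pre-built sets and permits early exit when exactly one ground value remains, instead of re-collecting head values on every lookup as the removed earlyExit helper did.

package main

import "fmt"

// term stands in for *ast.Term: a value plus whether it is ground
// (contains no variables).
type term struct {
	value  string
	ground bool
}

// indexNode stands in for trieNode: it holds the rules inserted under it and,
// once a complete-document rule is seen, the set of head values collected so far.
type indexNode struct {
	rules  []string
	values map[term]struct{} // nil until the first complete-doc rule is appended
}

// append mirrors the new trieNode.append: store the rule and record its head
// value at build time rather than at lookup time.
func (n *indexNode) append(rule string, head term, completeDoc bool) {
	n.rules = append(n.rules, rule)
	if n.values != nil {
		n.values[head] = struct{}{}
		return
	}
	if completeDoc {
		n.values = map[term]struct{}{head: {}}
	}
}

// traversal stands in for trieTraversalResult: lookup unions the per-node
// value sets it encounters during the trie walk.
type traversal struct {
	values map[term]struct{}
}

func (tr *traversal) add(n *indexNode) {
	for v := range n.values {
		tr.values[v] = struct{}{}
	}
}

// earlyExitPossible mirrors the new check in Lookup: evaluation may stop at the
// first matching rule only if all candidate rules share one ground head value.
func (tr *traversal) earlyExitPossible() bool {
	if len(tr.values) != 1 {
		return false
	}
	for v := range tr.values {
		return v.ground
	}
	return false
}

func main() {
	node := &indexNode{}
	node.append("allow rule 1", term{value: "true", ground: true}, true)
	node.append("allow rule 2", term{value: "true", ground: true}, true)

	tr := &traversal{values: map[term]struct{}{}}
	tr.add(node)
	fmt.Println(tr.earlyExitPossible()) // true: one ground value shared by all rules
}

The net effect of the commit, as reflected in the diff above: the work of gathering candidate head values moves from every Lookup call into Build (via trieNode.append), and Lookup's per-call contribution shrinks to a set union during traversal plus a single length-and-groundness check on tr.values.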
