diff --git a/internal/runtime/internal/controller/loader.go b/internal/runtime/internal/controller/loader.go
index fae75f5865..279260526d 100644
--- a/internal/runtime/internal/controller/loader.go
+++ b/internal/runtime/internal/controller/loader.go
@@ -149,7 +149,9 @@ func (l *Loader) Apply(options ApplyOptions) diag.Diagnostics {
 	l.cm.controllerEvaluation.Set(1)
 	defer l.cm.controllerEvaluation.Set(0)
 
-	l.cache.SetScope(options.ArgScope)
+	if options.ArgScope != nil {
+		l.cache.UpdateScopeVariables(options.ArgScope.Variables)
+	}
 
 	for key, value := range options.Args {
 		l.cache.CacheModuleArgument(key, value)
@@ -166,7 +168,7 @@ func (l *Loader) Apply(options ApplyOptions) diag.Diagnostics {
 
 	var (
 		components   = make([]ComponentNode, 0)
-		componentIDs = make([]ComponentID, 0)
+		componentIDs = make(map[string]ComponentID)
 		services     = make([]*ServiceNode, 0, len(l.services))
 	)
 
@@ -200,7 +202,7 @@ func (l *Loader) Apply(options ApplyOptions) diag.Diagnostics {
 		switch n := n.(type) {
 		case ComponentNode:
 			components = append(components, n)
-			componentIDs = append(componentIDs, n.ID())
+			componentIDs[n.ID().String()] = n.ID()
 
 			if err = l.evaluate(logger, n); err != nil {
 				var evalDiags diag.Diagnostics
@@ -260,7 +262,13 @@ func (l *Loader) Apply(options ApplyOptions) diag.Diagnostics {
 	l.componentNodes = components
 	l.serviceNodes = services
 	l.graph = &newGraph
-	l.cache.SyncIDs(componentIDs)
+	err := l.cache.SyncIDs(componentIDs)
+	if err != nil {
+		diags.Add(diag.Diagnostic{
+			Severity: diag.SeverityLevelError,
+			Message:  fmt.Sprintf("Failed to update the internal cache with the new list of components: %s", err),
+		})
+	}
 	l.blocks = options.ComponentBlocks
 	if l.globals.OnExportsChange != nil && l.cache.ExportChangeIndex() != l.moduleExportIndex {
 		l.moduleExportIndex = l.cache.ExportChangeIndex()
@@ -615,7 +623,7 @@ func (l *Loader) wireGraphEdges(g *dag.Graph) diag.Diagnostics {
 
 		// Finally, wire component references.
 		l.cache.mut.RLock()
-		refs, nodeDiags := ComponentReferences(n, g, l.log, l.cache.scope, l.globals.MinStability)
+		refs, nodeDiags := ComponentReferences(n, g, l.log, l.cache.GetScope(), l.globals.MinStability)
 		l.cache.mut.RUnlock()
 		for _, ref := range refs {
 			g.AddEdge(dag.Edge{From: n, To: ref.Target})
@@ -645,7 +653,7 @@ func (l *Loader) wireCustomComponentNode(g *dag.Graph, cc *CustomComponentNode)
 // Variables returns the Variables the Loader exposes for other components to
 // reference.
 func (l *Loader) Variables() map[string]interface{} {
-	return l.cache.BuildContext().Variables
+	return l.cache.GetScope().Variables
 }
 
 // Components returns the current set of loaded components.
@@ -703,7 +711,10 @@ func (l *Loader) EvaluateDependants(ctx context.Context, updatedNodes []*QueuedN
 		switch parentNode := parent.Node.(type) {
 		case ComponentNode:
 			// Make sure we're in-sync with the current exports of parent.
-			l.cache.CacheExports(parentNode.ID(), parentNode.Exports())
+			err := l.cache.CacheExports(parentNode.ID(), parentNode.Exports())
+			if err != nil {
+				level.Error(l.log).Log("msg", "failed to cache exports during evaluation", "err", err)
+			}
		case *ImportConfigNode:
			// Update the scope with the imported content.
 			l.componentNodeManager.customComponentReg.updateImportContent(parentNode)
@@ -780,7 +791,7 @@ func (l *Loader) concurrentEvalFn(n dag.Node, spanCtx context.Context, tracer tr
 	var err error
 	switch n := n.(type) {
 	case BlockNode:
-		ectx := l.cache.BuildContext()
+		ectx := l.cache.GetScope()
 
 		// RLock before evaluate to prevent Evaluating while the config is being reloaded
 		l.mut.RLock()
@@ -819,7 +830,7 @@ func (l *Loader) concurrentEvalFn(n dag.Node, spanCtx context.Context, tracer tr
 // evaluate constructs the final context for the BlockNode and
 // evaluates it. mut must be held when calling evaluate.
 func (l *Loader) evaluate(logger log.Logger, bn BlockNode) error {
-	ectx := l.cache.BuildContext()
+	ectx := l.cache.GetScope()
 	err := bn.Evaluate(ectx)
 	return l.postEvaluate(logger, bn, err)
 }
@@ -829,12 +840,18 @@ func (l *Loader) evaluate(logger log.Logger, bn BlockNode) error {
 func (l *Loader) postEvaluate(logger log.Logger, bn BlockNode, err error) error {
 	switch c := bn.(type) {
 	case ComponentNode:
-		// Always update the cache both the arguments and exports, since both might
-		// change when a component gets re-evaluated. We also want to cache the arguments and exports in case of an error
-		l.cache.CacheArguments(c.ID(), c.Arguments())
-		l.cache.CacheExports(c.ID(), c.Exports())
+		// Always update the cached exports, since they might change when a component gets re-evaluated.
+		// We also want to cache them in case of an error.
+		err2 := l.cache.CacheExports(c.ID(), c.Exports())
+		if err2 != nil {
+			level.Error(logger).Log("msg", "failed to cache exports after evaluation", "err", err2)
+			// Don't mask the previous evaluation error.
+			if err == nil {
+				return err2
+			}
+		}
	case *ArgumentConfigNode:
-		if _, found := l.cache.moduleArguments[c.Label()]; !found {
+		if _, found := l.cache.GetModuleArgument(c.Label()); !found {
 			if c.Optional() {
 				l.cache.CacheModuleArgument(c.Label(), c.Default())
 			} else {
diff --git a/internal/runtime/internal/controller/value_cache.go b/internal/runtime/internal/controller/value_cache.go
index cdce5825a7..b438226e7a 100644
--- a/internal/runtime/internal/controller/value_cache.go
+++ b/internal/runtime/internal/controller/value_cache.go
@@ -1,6 +1,7 @@
 package controller
 
 import (
+	"fmt"
 	"reflect"
 	"sync"
 
@@ -8,69 +9,72 @@ import (
 	"github.com/grafana/alloy/syntax/vm"
 )
 
-// valueCache caches component arguments and exports to expose as variables for
-// Alloy expressions.
-//
-// The current state of valueCache can then be built into a *vm.Scope for other
-// components to be evaluated.
+// This special keyword is used to expose the argument values to the custom components.
+const argumentLabel = "argument"
+
+// valueCache caches exports to expose as variables for Alloy expressions.
+// The exports are stored directly in the scope, which is used to evaluate Alloy expressions.
 type valueCache struct {
 	mut                sync.RWMutex
-	components         map[string]ComponentID // NodeID -> ComponentID
-	args               map[string]interface{} // NodeID -> component arguments value
-	exports            map[string]interface{} // NodeID -> component exports value
-	moduleArguments    map[string]any         // key -> module arguments value
-	moduleExports      map[string]any         // name -> value for the value of module exports
-	moduleChangedIndex int                    // Everytime a change occurs this is incremented
-	scope              *vm.Scope              // scope provides additional context for the nodes in the module
+	componentIds       map[string]ComponentID
+	moduleExports      map[string]any // name -> value for the value of module exports
+	moduleChangedIndex int            // Every time a change occurs this is incremented
+	scope              *vm.Scope      // scope provides additional context for the nodes in the module
 }
 
 // newValueCache creates a new ValueCache.
 func newValueCache() *valueCache {
 	return &valueCache{
-		components:      make(map[string]ComponentID),
-		args:            make(map[string]interface{}),
-		exports:         make(map[string]interface{}),
-		moduleArguments: make(map[string]any),
-		moduleExports:   make(map[string]any),
+		componentIds:  make(map[string]ComponentID, 0),
+		moduleExports: make(map[string]any),
+		scope:         vm.NewScope(make(map[string]any)),
 	}
 }
 
-func (vc *valueCache) SetScope(scope *vm.Scope) {
-	vc.mut.Lock()
-	defer vc.mut.Unlock()
-	vc.scope = scope
-}
-
-// CacheArguments will cache the provided arguments by the given id. args may
-// be nil to store an empty object.
-func (vc *valueCache) CacheArguments(id ComponentID, args component.Arguments) {
+// UpdateScopeVariables updates the Variables map of the scope with a deep copy of the provided map.
+func (vc *valueCache) UpdateScopeVariables(variables map[string]any) {
+	if variables == nil {
+		return
+	}
 	vc.mut.Lock()
 	defer vc.mut.Unlock()
-
-	nodeID := id.String()
-	vc.components[nodeID] = id
-
-	var argsVal interface{} = make(map[string]interface{})
-	if args != nil {
-		argsVal = args
-	}
-	vc.args[nodeID] = argsVal
+	vc.scope.Variables = deepCopyMap(variables)
 }
 
 // CacheExports will cache the provided exports using the given id. exports may
 // be nil to store an empty object.
-func (vc *valueCache) CacheExports(id ComponentID, exports component.Exports) {
+func (vc *valueCache) CacheExports(id ComponentID, exports component.Exports) error {
 	vc.mut.Lock()
 	defer vc.mut.Unlock()
-	nodeID := id.String()
-	vc.components[nodeID] = id
+	variables := vc.scope.Variables
+	// Build nested maps.
+	for i := 0; i < len(id)-1; i++ {
+		t := id[i]
+		if _, ok := variables[t]; !ok {
+			variables[t] = make(map[string]any)
+		} else if _, ok := variables[t].(map[string]any); !ok {
+			return fmt.Errorf("expected a map but found a value for %q when trying to cache the export for %s", t, id.String())
+		}
+		variables = variables[t].(map[string]any)
+	}
 
-	var exportsVal interface{} = make(map[string]interface{})
+	var exportsVal any = make(map[string]any)
 	if exports != nil {
 		exportsVal = exports
 	}
-	vc.exports[nodeID] = exportsVal
+	variables[id[len(id)-1]] = exportsVal
+	return nil
+}
+
+func (vc *valueCache) GetModuleArgument(key string) (any, bool) {
+	vc.mut.RLock()
+	defer vc.mut.RUnlock()
+	if _, ok := vc.scope.Variables[argumentLabel]; !ok {
+		return nil, false
+	}
+	v, exist := vc.scope.Variables[argumentLabel].(map[string]any)[key]
+	return v, exist
 }
 
 // CacheModuleArgument will cache the provided exports using the given id.
@@ -78,11 +82,12 @@ func (vc *valueCache) CacheModuleArgument(key string, value any) {
 	vc.mut.Lock()
 	defer vc.mut.Unlock()
 
-	if value == nil {
-		vc.moduleArguments[key] = nil
-	} else {
-		vc.moduleArguments[key] = value
+	if _, ok := vc.scope.Variables[argumentLabel]; !ok {
+		vc.scope.Variables[argumentLabel] = make(map[string]any)
 	}
+	keyMap := make(map[string]any)
+	keyMap["value"] = value
+	vc.scope.Variables[argumentLabel].(map[string]any)[key] = keyMap
 }
 
 // CacheModuleExportValue saves the value to the map
@@ -130,135 +135,98 @@ func (vc *valueCache) ExportChangeIndex() int {
 	return vc.moduleChangedIndex
 }
 
-// SyncIDs will remove any cached values for any Component ID which is not in
-// ids. SyncIDs should be called with the current set of components after the
-// graph is updated.
-func (vc *valueCache) SyncIDs(ids []ComponentID) {
-	expectMap := make(map[string]ComponentID, len(ids))
-	for _, id := range ids {
-		expectMap[id.String()] = id
-	}
-
+// SyncIDs will remove any cached values for any Component ID from the graph which is not in ids.
+// SyncIDs should be called with the current set of components after the graph is updated.
+func (vc *valueCache) SyncIDs(ids map[string]ComponentID) error {
 	vc.mut.Lock()
 	defer vc.mut.Unlock()
 
-	for id := range vc.components {
-		if _, keep := expectMap[id]; keep {
-			continue
+	// Find the components that should be removed.
+	cleanupIds := make([]ComponentID, 0)
+	for name, id := range vc.componentIds {
+		if _, exist := ids[name]; !exist {
+			cleanupIds = append(cleanupIds, id)
 		}
-		delete(vc.components, id)
-		delete(vc.args, id)
-		delete(vc.exports, id)
 	}
-}
-
-// SyncModuleArgs will remove any cached values for any args no longer in the map.
-func (vc *valueCache) SyncModuleArgs(args map[string]any) {
-	vc.mut.Lock()
-	defer vc.mut.Unlock()
-
-	for id := range vc.moduleArguments {
-		if _, keep := args[id]; keep {
-			continue
+	// Remove the component exports from the scope.
+	for _, id := range cleanupIds {
+		err := cleanup(vc.scope.Variables, id, 0)
+		if err != nil {
+			return fmt.Errorf("failed to sync component %s: %w", id.String(), err)
 		}
-		delete(vc.moduleArguments, id)
 	}
+	vc.componentIds = ids
+	return nil
 }
 
-// BuildContext builds a vm.Scope based on the current set of cached values.
-// The arguments and exports for the same ID are merged into one object.
-func (vc *valueCache) BuildContext() *vm.Scope {
-	vc.mut.RLock()
-	defer vc.mut.RUnlock()
-
-	scope := vm.NewScope(make(map[string]interface{}))
+// cleanup removes the ComponentID path from the map
+func cleanup(m map[string]any, id ComponentID, index int) error {
 
-	// First, partition components by Alloy block name.
-	var componentsByBlockName = make(map[string][]ComponentID)
-	for _, id := range vc.components {
-		blockName := id[0]
-		componentsByBlockName[blockName] = append(componentsByBlockName[blockName], id)
+	if _, ok := m[id[index]]; !ok {
+		return nil
 	}
 
-	// Then, convert each partition into a single value.
-	for blockName, ids := range componentsByBlockName {
-		scope.Variables[blockName] = vc.buildValue(ids, 1)
+	if index == len(id)-1 {
+		delete(m, id[index]) // Remove the component's exports.
+		return nil
 	}
 
-	// Add module arguments to the scope.
-	if len(vc.moduleArguments) > 0 {
-		scope.Variables["argument"] = make(map[string]any)
+	if _, ok := m[id[index]].(map[string]any); !ok {
+		return fmt.Errorf("expected a map but found a value for %q", id[index])
 	}
 
-	for key, value := range vc.moduleArguments {
-		keyMap := make(map[string]any)
-		keyMap["value"] = value
+	nextM := m[id[index]].(map[string]any)
 
-		switch args := scope.Variables["argument"].(type) {
-		case map[string]any:
-			args[key] = keyMap
+	err := cleanup(nextM, id, index+1)
+	if err != nil {
+		return err
 	}
 
-	if vc.scope != nil {
-		// Merges the current scope with the one built from the components at this level.
-		scope.Variables = deepMergeMaps(vc.scope.Variables, scope.Variables)
+	// Delete if the map at this level is empty.
+	// If you only have one Prometheus component and you remove it, it will clean up the full Prometheus path.
+	// If you have one Prometheus relabel and one Prometheus scrape, and you remove the relabel, it will clean up the relabel path.
+	if len(nextM) == 0 {
+		delete(m, id[index])
 	}
-
-	return scope
+	return nil
 }
 
-// buildValue recursively converts the set of user components into a single
-// value. offset is used to determine which element in the userComponentName
-// we're looking at.
-func (vc *valueCache) buildValue(from []ComponentID, offset int) interface{} {
-	// We can't recurse anymore; return the node directly.
-	if len(from) == 1 && offset >= len(from[0]) {
-		name := from[0].String()
-
-		// TODO(rfratto): should we allow arguments to be returned so users can
-		// reference arguments as well as exports?
-		exports, ok := vc.exports[name]
-		if !ok {
-			exports = make(map[string]interface{})
-		}
-		return exports
-	}
-
-	attrs := make(map[string]interface{})
+// SyncModuleArgs will remove any cached values for any args no longer in the map.
+func (vc *valueCache) SyncModuleArgs(args map[string]any) {
+	vc.mut.Lock()
+	defer vc.mut.Unlock()
 
-	// First, partition the components by their label.
-	var componentsByLabel = make(map[string][]ComponentID)
-	for _, id := range from {
-		blockName := id[offset]
-		componentsByLabel[blockName] = append(componentsByLabel[blockName], id)
+	if _, ok := vc.scope.Variables[argumentLabel]; !ok {
+		return
 	}
 
-	// Then, convert each partition into a single value.
-	for label, ids := range componentsByLabel {
-		attrs[label] = vc.buildValue(ids, offset+1)
+	argsMap := vc.scope.Variables[argumentLabel].(map[string]any)
+	for arg := range argsMap {
+		if _, ok := args[arg]; !ok {
+			delete(argsMap, arg)
+		}
+	}
+	if len(argsMap) == 0 {
+		delete(vc.scope.Variables, argumentLabel)
 	}
-	return attrs
 }
 
-// deepMergeMaps merges two maps. It uses the values of map2 in case of conflict.
-func deepMergeMaps(map1, map2 map[string]any) map[string]any {
-	merged := make(map[string]any, len(map1)+len(map2))
-
-	for key, value := range map1 {
-		merged[key] = value
-	}
+// GetScope returns the current scope.
+func (vc *valueCache) GetScope() *vm.Scope {
+	vc.mut.RLock()
+	defer vc.mut.RUnlock()
+	return vm.NewScope(deepCopyMap(vc.scope.Variables))
+}
 
-	for key, value := range map2 {
-		if existingValue, exists := merged[key]; exists {
-			if map1Value, ok1 := existingValue.(map[string]any); ok1 {
-				if map2Value, ok2 := value.(map[string]any); ok2 {
-					merged[key] = deepMergeMaps(map1Value, map2Value)
-					continue
-				}
-			}
+func deepCopyMap(original map[string]any) map[string]any {
+	newMap := make(map[string]any, len(original))
+	for key, value := range original {
+		switch v := value.(type) {
+		case map[string]any:
+			newMap[key] = deepCopyMap(v)
+		default:
+			newMap[key] = v
 		}
-		merged[key] = value
 	}
-
-	return merged
+	return newMap
 }
diff --git a/internal/runtime/internal/controller/value_cache_test.go b/internal/runtime/internal/controller/value_cache_test.go
index d481046371..ff104453c0 100644
--- a/internal/runtime/internal/controller/value_cache_test.go
+++ b/internal/runtime/internal/controller/value_cache_test.go
@@ -54,15 +54,12 @@ func TestValueCache(t *testing.T) {
 
 	// For now, only exports are placed in generated objects, which is why the
 	// bar values are empty and the foo object only contains the exports.
-	vc.CacheArguments(ComponentID{"foo"}, fooArgs{Something: true})
-	vc.CacheExports(ComponentID{"foo"}, fooExports{SomethingElse: true})
-	vc.CacheArguments(ComponentID{"bar", "label_a"}, barArgs{Number: 12})
-	vc.CacheArguments(ComponentID{"bar", "label_b"}, barArgs{Number: 34})
+	require.NoError(t, vc.CacheExports(ComponentID{"foo"}, fooExports{SomethingElse: true}))
 
-	res := vc.BuildContext()
+	res := vc.GetScope()
 
 	var (
-		expectKeys = []string{"foo", "bar"}
+		expectKeys = []string{"foo"}
 		actualKeys []string
 	)
 	for varName := range res.Variables {
@@ -73,12 +70,7 @@ func TestValueCache(t *testing.T) {
 	type object = map[string]interface{}
 
 	expectFoo := fooExports{SomethingElse: true}
-	expectBar := object{
-		"label_a": object{}, // no exports for bar
-		"label_b": object{}, // no exports for bar
-	}
 
 	require.Equal(t, expectFoo, res.Variables["foo"])
-	require.Equal(t, expectBar, res.Variables["bar"])
 }
 
 func TestExportValueCache(t *testing.T) {
@@ -157,37 +149,36 @@ func TestModuleArgumentCache(t *testing.T) {
 			vc.CacheModuleArgument("arg", tc.argValue)
 
 			// Build the scope and validate it
-			res := vc.BuildContext()
+			res := vc.GetScope()
 			expected := map[string]any{"arg": map[string]any{"value": tc.argValue}}
 			require.Equal(t, expected, res.Variables["argument"])
 
 			// Sync arguments where the arg shouldn't change
 			syncArgs := map[string]any{"arg": tc.argValue}
 			vc.SyncModuleArgs(syncArgs)
-			res = vc.BuildContext()
+			res = vc.GetScope()
 			require.Equal(t, expected, res.Variables["argument"])
 
 			// Sync arguments where the arg should clear out
 			syncArgs = map[string]any{}
 			vc.SyncModuleArgs(syncArgs)
-			res = vc.BuildContext()
+			res = vc.GetScope()
 			require.Equal(t, map[string]any{}, res.Variables)
 		})
 	}
 }
 
-func TestBuildContextWithScope(t *testing.T) {
+func TestScope(t *testing.T) {
 	vc := newValueCache()
-	scope := vm.NewScope(
+	vc.scope = vm.NewScope(
 		map[string]any{
 			"test": map[string]any{
 				"scope": barArgs{Number: 13},
 			},
 		},
 	)
-	vc.SetScope(scope)
-	vc.CacheExports(ComponentID{"foo", "bar"}, barArgs{Number: 12})
-	res := vc.BuildContext()
+	require.NoError(t, vc.CacheExports(ComponentID{"foo", "bar"}, barArgs{Number: 12}))
+	res := vc.GetScope()
 
 	expected := map[string]any{
 		"test": map[string]any{
@@ -204,18 +195,17 @@ func TestBuildContextWithScope(t *testing.T) {
 	require.Equal(t, expected, res.Variables)
 }
 
-func TestBuildContextWithScopeConflict(t *testing.T) {
+func TestScopeSameNamespace(t *testing.T) {
 	vc := newValueCache()
-	scope := vm.NewScope(
+	vc.scope = vm.NewScope(
 		map[string]any{
 			"test": map[string]any{
 				"scope": barArgs{Number: 13},
 			},
 		},
 	)
-	vc.SetScope(scope)
-	vc.CacheExports(ComponentID{"test", "bar"}, barArgs{Number: 12})
-	res := vc.BuildContext()
+	require.NoError(t, vc.CacheExports(ComponentID{"test", "bar"}, barArgs{Number: 12}))
+	res := vc.GetScope()
 
 	expected := map[string]any{
 		"test": map[string]any{
@@ -230,18 +220,17 @@ func TestBuildContextWithScopeConflict(t *testing.T) {
 	require.Equal(t, expected, res.Variables)
 }
 
-func TestBuildContextWithScopeOverride(t *testing.T) {
+func TestScopeOverride(t *testing.T) {
 	vc := newValueCache()
-	scope := vm.NewScope(
+	vc.scope = vm.NewScope(
 		map[string]any{
 			"test": map[string]any{
 				"scope": barArgs{Number: 13},
 			},
 		},
 	)
-	vc.SetScope(scope)
-	vc.CacheExports(ComponentID{"test", "scope"}, barArgs{Number: 12})
-	res := vc.BuildContext()
+	require.NoError(t, vc.CacheExports(ComponentID{"test", "scope"}, barArgs{Number: 12}))
+	res := vc.GetScope()
 
 	expected := map[string]any{
 		"test": map[string]any{
@@ -251,20 +240,24 @@ func TestBuildContextWithScopeOverride(t *testing.T) {
 		},
 	}
 	require.Equal(t, expected, res.Variables)
+}
 
-	originalScope := vm.NewScope(
+func TestScopePathOverrideError(t *testing.T) {
+	vc := newValueCache()
+	vc.scope = vm.NewScope(
 		map[string]any{
-			"test": map[string]any{
-				"scope": barArgs{Number: 13},
-			},
+			"test": barArgs{Number: 13},
 		},
 	)
-	require.Equal(t, vc.scope, originalScope) // ensure that the original scope is not modified after building the context
+	require.ErrorContains(t,
+		vc.CacheExports(ComponentID{"test", "scope"}, barArgs{Number: 12}),
+		"expected a map but found a value for \"test\" when trying to cache the export for test.scope",
+	)
 }
 
-func TestScopeMergeMoreNesting(t *testing.T) {
+func TestScopeComplex(t *testing.T) {
 	vc := newValueCache()
-	scope := vm.NewScope(
+	vc.scope = vm.NewScope(
 		map[string]any{
 			"test": map[string]any{
 				"cp1": map[string]any{
@@ -274,10 +267,9 @@
 			},
 		},
 	)
-	vc.SetScope(scope)
-	vc.CacheExports(ComponentID{"test", "cp1", "foo"}, barArgs{Number: 12})
-	vc.CacheExports(ComponentID{"test", "cp1", "bar", "fizz"}, barArgs{Number: 2})
-	res := vc.BuildContext()
+	require.NoError(t, vc.CacheExports(ComponentID{"test", "cp1", "foo"}, barArgs{Number: 12}))
+	require.NoError(t, vc.CacheExports(ComponentID{"test", "cp1", "bar", "fizz"}, barArgs{Number: 2}))
+	res := vc.GetScope()
 
 	expected := map[string]any{
 		"test": map[string]any{
@@ -293,3 +285,72 @@
 	}
 	require.Equal(t, expected, res.Variables)
 }
+
+func TestSyncIds(t *testing.T) {
+	vc := newValueCache()
+	vc.scope = vm.NewScope(
+		map[string]any{
+			"test": map[string]any{
+				"cp1": map[string]any{
+					"scope": barArgs{Number: 13},
+				},
+				"cp2": barArgs{Number: 12},
+			},
+			"test2": map[string]any{
+				"cp1": map[string]any{
+					"scope": barArgs{Number: 13},
+				},
+			},
+			"test3": 5,
+		},
+	)
+	require.NoError(t, vc.CacheExports(ComponentID{"test", "cp1", "bar", "fizz"}, barArgs{Number: 2}))
+	originalIds := map[string]ComponentID{
+		"test.cp1":  {"test", "cp1"},
+		"test.cp2":  {"test", "cp2"},
+		"test2.cp1": {"test2", "cp1"},
+		"test3":     {"test3"},
+	}
+	require.NoError(t, vc.SyncIDs(originalIds))
+	require.Equal(t, originalIds, vc.componentIds)
+
+	newIds := map[string]ComponentID{
+		"test.cp1":  {"test", "cp1"},
+		"test2.cp1": {"test2", "cp1"},
"cp1"}, + } + require.NoError(t, vc.SyncIDs(newIds)) + require.Equal(t, newIds, vc.componentIds) + expected := map[string]any{ + "test": map[string]any{ + "cp1": map[string]any{ + "scope": barArgs{Number: 13}, + "bar": map[string]any{ + "fizz": barArgs{Number: 2}, + }, + }, + }, + "test2": map[string]any{ + "cp1": map[string]any{ + "scope": barArgs{Number: 13}, + }, + }, + } + res := vc.GetScope() + require.Equal(t, expected, res.Variables) +} + +func TestSyncIdsError(t *testing.T) { + vc := newValueCache() + componentID := ComponentID{"test", "cp1", "bar", "fizz"} + ids := map[string]ComponentID{"test.cp1.bar.fizz": componentID} + require.NoError(t, vc.CacheExports(componentID, barArgs{Number: 2})) + require.NoError(t, vc.SyncIDs(ids)) + require.Equal(t, ids, vc.componentIds) + + // Modify the map manually + vc.componentIds = map[string]ComponentID{"test.cp1.bar.fizz.foo": {"test", "cp1", "bar", "fizz", "foo"}} + require.ErrorContains(t, + vc.SyncIDs(ids), + "failed to sync component test.cp1.bar.fizz.foo: expected a map but found a value for \"fizz\"", + ) +} diff --git a/syntax/vm/vm.go b/syntax/vm/vm.go index 9a61f395b8..8bb7d6a999 100644 --- a/syntax/vm/vm.go +++ b/syntax/vm/vm.go @@ -472,12 +472,13 @@ func NewScope(variables map[string]interface{}) *Scope { // Lookup looks up a named identifier from the scope and the stdlib. func (s *Scope) Lookup(name string) (interface{}, bool) { - // Traverse the scope first, then fall back to stdlib. + // Check the scope first. if s != nil { if val, ok := s.Variables[name]; ok { return val, true } } + // Falls back to the stdlib. if ident, ok := stdlib.Identifiers[name]; ok { return ident, true }