diff --git a/pkg/corset/allocation.go b/pkg/corset/allocation.go index 20b68185..a58c3666 100644 --- a/pkg/corset/allocation.go +++ b/pkg/corset/allocation.go @@ -77,13 +77,11 @@ func (r *Register) Deactivate() { // RegisterSource provides necessary information about source-level columns // allocated to a given register. type RegisterSource struct { - // Module in which source-level column resides - module string - // Perspective containing source-level column. If none, then this is - // assigned "". - perspective string - // Name of source-level column. - name string + // Context is a prefix of name which, when they differ, indicates a virtual + // column (i.e. one which is subject to register allocation). + context util.Path + // Fully qualified (i.e. absolute) name of source-level column. + name util.Path // Length multiplier of source-level column. multiplier uint // Underlying datatype of the source-level column. @@ -94,6 +92,19 @@ type RegisterSource struct { computed bool } +// IsVirtual indicates whether or not this is a "virtual" column. That is, +// something which is subject to register allocation (i.e. because it is +// declared in a perspective). +func (p *RegisterSource) IsVirtual() bool { + return !p.name.Parent().Equals(p.context) +} + +// Perspective returns the name of the "virtual perspective" in which this +// column exists. +func (p *RegisterSource) Perspective() string { + return p.name.Parent().Slice(p.context.Depth()).String() +} + // RegisterAllocation is a generic interface to support different "regsiter // allocation" algorithms. 
More specifically, register allocation is the // process of allocating columns to their underlying HIR columns (a.k.a @@ -186,20 +197,24 @@ func NewRegisterAllocator(allocation RegisterAllocation) *RegisterAllocator { // Identify all perspectives for iter := allocation.Registers(); iter.HasNext(); { regInfo := allocation.Register(iter.Next()) + regSource := regInfo.Sources[0] + // if len(regInfo.Sources) != 1 { // This should be unreachable. panic("register not associated with unique column") - } else if regInfo.Sources[0].perspective != "" { - allocator.allocatePerspective(regInfo.Sources[0].perspective) + } else if regSource.IsVirtual() { + allocator.allocatePerspective(regSource.Perspective()) } } // Initial allocation of perspective registers for iter := allocation.Registers(); iter.HasNext(); { regIndex := iter.Next() regInfo := allocation.Register(regIndex) - - if regInfo.Sources[0].perspective != "" { - allocator.allocateRegister(regInfo.Sources[0].perspective, regIndex) + regSource := regInfo.Sources[0] + // + if regSource.IsVirtual() { + perspective := regSource.Perspective() + allocator.allocateRegister(perspective, regIndex) } } // Done (for now) diff --git a/pkg/corset/binding.go b/pkg/corset/binding.go index d7c81995..ce018193 100644 --- a/pkg/corset/binding.go +++ b/pkg/corset/binding.go @@ -6,19 +6,9 @@ import ( "github.com/consensys/go-corset/pkg/sexp" tr "github.com/consensys/go-corset/pkg/trace" + "github.com/consensys/go-corset/pkg/util" ) -// BindingId is an identifier is used to distinguish different forms of binding, -// as some forms are known from their use. Specifically, at the current time, -// only functions are distinguished from other categories (e.g. columns, -// parameters, etc). -type BindingId struct { - // Name of the binding - name string - // Indicates whether function binding or other. - fn bool -} - // Binding represents an association between a name, as found in a source file, // and concrete item (e.g. 
a column, function, etc). type Binding interface { @@ -145,12 +135,14 @@ func (p *FunctionSignature) Apply(args []Expr, srcmap *sexp.SourceMaps[Node]) Ex // ColumnBinding represents something bound to a given column. type ColumnBinding struct { - // Column's enclosing module - module string - // Enclosing perspective - perspective string - // Column's name - name string + // Context determines the real (i.e. non-virtual) enclosing module of this + // column, and should always be a prefix of the path. If this column was + // declared in a perspective then it will be the perspective's enclosing + // module. Otherwise, it will exactly match the path's parent. + context util.Path + // Absolute path of column. This determines the name of the column, its + // enclosing module and/or perspective. + path util.Path // Determines whether this is a computed column, or not. computed bool // Determines whether this column must be proven (or not). @@ -167,8 +159,13 @@ type ColumnBinding struct { // definterleaved constraint the target column information (e.g. its type) is // not immediately available and must be determined from those columns from // which it is constructed. -func NewComputedColumnBinding(module string, name string) *ColumnBinding { - return &ColumnBinding{module, "", name, true, false, 0, nil} +func NewComputedColumnBinding(context util.Path, path util.Path) *ColumnBinding { + return &ColumnBinding{context, path, true, false, 0, nil} +} + +// AbsolutePath returns the fully resolved (absolute) path of the column in question. +func (p *ColumnBinding) AbsolutePath() *util.Path { + return &p.path } // IsFinalised checks whether this binding has been finalised yet or not. @@ -185,7 +182,7 @@ func (p *ColumnBinding) Finalise(multiplier uint, datatype Type) { // Context returns the of this column. That is, the module in which this colunm // was declared and also the length multiplier of that module it requires. 
func (p *ColumnBinding) Context() Context { - return tr.NewContext(p.module, p.multiplier) + return tr.NewContext(p.context.String(), p.multiplier) } // ============================================================================ @@ -194,6 +191,7 @@ func (p *ColumnBinding) Context() Context { // ConstantBinding represents a constant definition type ConstantBinding struct { + path util.Path // Constant expression which, when evaluated, produces a constant value. value Expr // Inferred type of the given expression @@ -202,8 +200,8 @@ type ConstantBinding struct { // NewConstantBinding creates a new constant binding (which is initially not // finalised). -func NewConstantBinding(value Expr) ConstantBinding { - return ConstantBinding{value, nil} +func NewConstantBinding(path util.Path, value Expr) ConstantBinding { + return ConstantBinding{path, value, nil} } // IsFinalised checks whether this binding has been finalised yet or not. diff --git a/pkg/corset/compiler.go b/pkg/corset/compiler.go index 7c1232e0..70c4d2de 100644 --- a/pkg/corset/compiler.go +++ b/pkg/corset/compiler.go @@ -100,7 +100,7 @@ func (p *Compiler) Compile() (*hir.Schema, []SyntaxError) { return nil, errs } // Convert global scope into an environment by allocating all columns. - environment := scope.ToEnvironment(p.allocator) + environment := NewGlobalEnvironment(scope, p.allocator) // Finally, translate everything and add it to the schema. return TranslateCircuit(environment, p.srcmap, &p.circuit) } diff --git a/pkg/corset/declaration.go b/pkg/corset/declaration.go index 853eebdb..03bcb81e 100644 --- a/pkg/corset/declaration.go +++ b/pkg/corset/declaration.go @@ -183,12 +183,12 @@ func (p *DefColumns) Lisp() sexp.SExp { // DefColumn packages together those piece relevant to declaring an individual // column, such its name and type. type DefColumn struct { - // Column name - name string // Binding of this column (which may or may not be finalised). 
binding ColumnBinding } +var _ SymbolDefinition = &DefColumn{} + // IsFunction is never true for a column definition. func (e *DefColumn) IsFunction() bool { return false @@ -200,9 +200,16 @@ func (e *DefColumn) Binding() Binding { return &e.binding } -// Name of symbol being defined +// Name returns the (unqualified) name of this symbol. For example, "X" for +// a column X defined in a module m1. func (e *DefColumn) Name() string { - return e.name + return e.binding.path.Tail() +} + +// Path returns the qualified name (i.e. absolute path) of this symbol. For +// example, "m1.X" for a column X defined in module m1. +func (e *DefColumn) Path() *util.Path { + return &e.binding.path } // DataType returns the type of this column. If this column have not yet been @@ -241,7 +248,7 @@ func (e *DefColumn) MustProve() bool { // for debugging purposes. func (e *DefColumn) Lisp() sexp.SExp { list := sexp.EmptyList() - list.Append(sexp.NewSymbol(e.name)) + list.Append(sexp.NewSymbol(e.Name())) // if e.binding.dataType != nil { datatype := e.binding.dataType.String() @@ -326,7 +333,7 @@ func (p *DefConst) Lisp() sexp.SExp { def.Append(sexp.NewSymbol("defconst")) // for _, c := range p.constants { - def.Append(sexp.NewSymbol(c.name)) + def.Append(sexp.NewSymbol(c.Name())) def.Append(c.binding.value.Lisp()) } // Done @@ -336,8 +343,6 @@ func (p *DefConst) Lisp() sexp.SExp { // DefConstUnit represents the definition of exactly one constant value. As // such, this is an instance of SymbolDefinition and provides a binding. type DefConstUnit struct { - // Name of the constant being declared. - name string // Binding for this constant. binding ConstantBinding } @@ -353,19 +358,26 @@ func (e *DefConstUnit) Binding() Binding { return &e.binding } -// Name of symbol being defined +// Name returns the (unqualified) name of this symbol. For example, "X" for +// a column X defined in a module m1. 
func (e *DefConstUnit) Name() string { - return e.name + return e.binding.path.Tail() +} + +// Path returns the qualified name (i.e. absolute path) of this symbol. For +// example, "m1.X" for a column X defined in module m1. +func (e *DefConstUnit) Path() *util.Path { + return &e.binding.path } // Lisp converts this node into its lisp representation. This is primarily used // for debugging purposes. // //nolint:revive -func (p *DefConstUnit) Lisp() sexp.SExp { +func (e *DefConstUnit) Lisp() sexp.SExp { return sexp.NewList([]sexp.SExp{ - sexp.NewSymbol(p.name), - p.binding.value.Lisp()}) + sexp.NewSymbol(e.Name()), + e.binding.value.Lisp()}) } // ============================================================================ @@ -766,9 +778,16 @@ type DefPerspective struct { Columns []*DefColumn } -// Name of symbol being defined +// Name returns the (unqualified) name of this symbol. For example, "X" for +// a column X defined in a module m1. func (p *DefPerspective) Name() string { - return p.symbol.name + return p.symbol.Path().Tail() +} + +// Path returns the qualified name (i.e. absolute path) of this symbol. For +// example, "m1.X" for a column X defined in module m1. +func (p *DefPerspective) Path() *util.Path { + return &p.symbol.path } // IsFunction is always true for a function definition! @@ -903,6 +922,8 @@ type DefFun struct { parameters []*DefParameter } +var _ SymbolDefinition = &DefFun{} + // IsFunction is always true for a function definition! func (p *DefFun) IsFunction() bool { return true @@ -932,9 +953,16 @@ func (p *DefFun) Binding() Binding { return p.symbol.binding } -// Name of symbol being defined +// Name returns the (unqualified) name of this symbol. For example, "X" for +// a column X defined in a module m1. func (p *DefFun) Name() string { - return p.symbol.name + return p.symbol.Path().Tail() +} + +// Path returns the qualified name (i.e. absolute path) of this symbol. For +// example, "m1.X" for a column X defined in module m1. 
+func (p *DefFun) Path() *util.Path { + return &p.symbol.path } // Definitions returns the set of symbols defined by this declaration. Observe @@ -951,7 +979,8 @@ func (p *DefFun) Dependencies() util.Iterator[Symbol] { // Filter out all parameters declared in this function, since these are not // external dependencies. for _, d := range deps { - if d.IsQualified() || d.IsFunction() || !p.hasParameter(d.Name()) { + n := d.Path() + if n.IsAbsolute() || d.IsFunction() || n.Depth() > 1 || !p.hasParameter(n.Head()) { ndeps = append(ndeps, d) } } @@ -976,7 +1005,7 @@ func (p *DefFun) IsFinalised() bool { func (p *DefFun) Lisp() sexp.SExp { return sexp.NewList([]sexp.SExp{ sexp.NewSymbol("defun"), - sexp.NewSymbol(p.symbol.name), + sexp.NewSymbol(p.symbol.path.Tail()), sexp.NewSymbol("..."), // todo }) } diff --git a/pkg/corset/environment.go b/pkg/corset/environment.go index baa61b4e..290e0159 100644 --- a/pkg/corset/environment.go +++ b/pkg/corset/environment.go @@ -33,8 +33,9 @@ type Environment interface { // (i.e. underlying HIR column identifier). Register(index uint) *Register // RegisterOf identifiers the register (i.e. underlying (HIR) column) to - // which a given source-level (i.e. corset) column is allocated. - RegisterOf(module string, name string) uint + // which a given source-level (i.e. corset) column is allocated. This + // expects an absolute path. + RegisterOf(path *util.Path) uint // RegistersOf identifies the set of registers (i.e. underlying (HIR) // columns) associated with a given module. RegistersOf(module string) []uint @@ -43,13 +44,6 @@ type Environment interface { ContextOf(from Context) tr.Context } -// ColumnId uniquely identifiers a Corset column. Note, however, that -// multiple Corset columns can be mapped to a single underlying register. -type ColumnId struct { - module string - column string -} - // GlobalEnvironment is a wrapper around a global scope. 
The point, really, is // to signal the change between a global scope whose columns have yet to be // allocated, from an environment whose columns are allocated. @@ -57,7 +51,7 @@ type GlobalEnvironment struct { // Info about modules modules map[string]*ModuleInfo // Map source-level columns to registers - columns map[ColumnId]uint + columns map[string]uint // Registers registers []Register } @@ -66,10 +60,18 @@ type GlobalEnvironment struct { // by allocating appropriate identifiers to all columns. This process is // parameterised upon a given register allocator, thus enabling different // allocation algorithms. -func NewGlobalEnvironment(scope *GlobalScope, allocator func(RegisterAllocation)) GlobalEnvironment { +func NewGlobalEnvironment(root *ModuleScope, allocator func(RegisterAllocation)) GlobalEnvironment { + // Sanity Check + if !root.IsRoot() { + // Definitely should be unreachable. + panic("root scope required") + } + // Construct top-level module list. + modules := root.Flattern() + // Initialise the environment env := GlobalEnvironment{nil, nil, nil} - env.initModules(scope) - env.initColumnsAndRegisters(scope) + env.initModules(modules) + env.initColumnsAndRegisters(modules) // Apply register allocation. env.applyRegisterAllocation(allocator) // Done @@ -88,12 +90,10 @@ func (p GlobalEnvironment) Register(index uint) *Register { return &p.registers[index] } -// RegisterOf identifiers the register (i.e. underlying (HIR) column) to +// RegisterOf identifies the register (i.e. underlying (HIR) column) to // which a given source-level (i.e. corset) column is allocated. -func (p GlobalEnvironment) RegisterOf(module string, name string) uint { - // Construct column identifier. 
- cid := ColumnId{module, name} - regId := p.columns[cid] +func (p GlobalEnvironment) RegisterOf(column *util.Path) uint { + regId := p.columns[column.String()] // Lookup register info return regId } @@ -115,8 +115,8 @@ func (p GlobalEnvironment) RegistersOf(module string) []uint { } // ColumnsOf returns the set of registers allocated to a given column. -func (p GlobalEnvironment) ColumnsOf(register uint) []ColumnId { - var columns []ColumnId +func (p GlobalEnvironment) ColumnsOf(register uint) []string { + var columns []string // for col, reg := range p.columns { if reg == register { @@ -142,52 +142,51 @@ func (p GlobalEnvironment) ContextOf(from Context) tr.Context { // Module allocation is a simple process of allocating modules their specific // identifiers. This has to match exactly how the translator does it, otherwise // there will be problems. -func (p *GlobalEnvironment) initModules(scope *GlobalScope) { +func (p *GlobalEnvironment) initModules(modules []*ModuleScope) { p.modules = make(map[string]*ModuleInfo) moduleId := uint(0) - // Allocate modules one-by-one - for _, m := range scope.modules { - p.modules[m.module] = &ModuleInfo{m.module, moduleId} - moduleId++ + // Allocate submodules one-by-one + for _, m := range modules { + if !m.virtual { + name := m.path.String() + p.modules[name] = &ModuleInfo{name, moduleId} + moduleId++ + } } } // Performs an initial register allocation which simply maps every column to a // unique register. The intention is that, subsequently, registers can be // merged as necessary. -func (p *GlobalEnvironment) initColumnsAndRegisters(scope *GlobalScope) { - p.columns = make(map[ColumnId]uint) +func (p *GlobalEnvironment) initColumnsAndRegisters(modules []*ModuleScope) { + p.columns = make(map[string]uint) p.registers = make([]Register, 0) // Allocate input columns first. 
- for _, m := range scope.modules { + for _, m := range modules { + owner := m.Owner() + // for _, b := range m.bindings { if binding, ok := b.(*ColumnBinding); ok && !binding.computed { - if m.module != binding.module { - panic("unreachable?") - } - // - p.allocateColumn(binding) + p.allocateColumn(binding, owner.path) } } } // Allocate assignments second. - for _, m := range scope.modules { + for _, m := range modules { + owner := m.Owner() + // for _, b := range m.bindings { if binding, ok := b.(*ColumnBinding); ok && binding.computed { - if m.module != binding.module { - panic("unreachable?") - } - - p.allocateColumn(binding) + p.allocateColumn(binding, owner.path) } } } // Apply aliases - for _, m := range scope.modules { + for _, m := range modules { for id, binding_id := range m.ids { if binding, ok := m.bindings[binding_id].(*ColumnBinding); ok && !id.fn { - orig := ColumnId{m.module, binding.name} - alias := ColumnId{m.module, id.name} + orig := binding.path.String() + alias := m.path.Extend(id.name).String() p.columns[alias] = p.columns[orig] } } @@ -198,41 +197,52 @@ func (p *GlobalEnvironment) initColumnsAndRegisters(scope *GlobalScope) { // column can correspond to multiple underling registers, this can result in the // allocation of a number of registers (based on the columns type). For // example, an array of length n will allocate n registers, etc. 
-func (p *GlobalEnvironment) allocateColumn(column *ColumnBinding) { - p.allocate(column, column.name, column.dataType) +func (p *GlobalEnvironment) allocateColumn(column *ColumnBinding, context util.Path) { + p.allocate(column, context, column.path, column.dataType) } -func (p *GlobalEnvironment) allocate(column *ColumnBinding, name string, datatype Type) { +func (p *GlobalEnvironment) allocate(column *ColumnBinding, ctx util.Path, path util.Path, datatype Type) { // Check for base base if datatype.AsUnderlying() != nil { - p.allocateUnit(column, name, datatype.AsUnderlying()) + p.allocateUnit(column, ctx, path, datatype.AsUnderlying()) } else if arraytype, ok := datatype.(*ArrayType); ok { // For now, assume must be an array - p.allocateArray(column, name, arraytype) + p.allocateArray(column, ctx, path, arraytype) } else { panic(fmt.Sprintf("unknown type encountered: %v", datatype)) } } // Allocate an array type -func (p *GlobalEnvironment) allocateArray(column *ColumnBinding, name string, arraytype *ArrayType) { +func (p *GlobalEnvironment) allocateArray(col *ColumnBinding, ctx util.Path, path util.Path, arrtype *ArrayType) { // Allocate n columns - for i := arraytype.min; i <= arraytype.max; i++ { - ith_name := fmt.Sprintf("%s_%d", name, i) - p.allocate(column, ith_name, arraytype.element) + for i := arrtype.min; i <= arrtype.max; i++ { + ith_name := fmt.Sprintf("%s_%d", path.Tail(), i) + ith_path := path.Parent().Extend(ith_name) + p.allocate(col, ctx, *ith_path, arrtype.element) } } // Allocate a single register. -func (p *GlobalEnvironment) allocateUnit(column *ColumnBinding, name string, datatype sc.Type) { - moduleId := p.modules[column.module].Id - colId := ColumnId{column.module, name} +func (p *GlobalEnvironment) allocateUnit(column *ColumnBinding, ctx util.Path, path util.Path, datatype sc.Type) { + module := ctx.String() + // // The name is extracted from the different between the context and the // + // // path. 
The context must be a prefix of the path and, essentially, // + // // identifies the concrete module where this column will eventually live. + // name := path.Slice(ctx.Depth()).String()[1:] + // // Neaten the name up a bit + // name = strings.ReplaceAll(name, "/", "$") + // + // FIXME: below is used instead of above in order to replicate the original + // Corset tool. Eventually, this behaviour should be deprecated. + name := path.Tail() + // + moduleId := p.modules[module].Id regId := uint(len(p.registers)) // Construct appropriate register source. source := RegisterSource{ - column.module, - column.perspective, - name, + ctx, + path, column.multiplier, datatype, column.mustProve, @@ -245,7 +255,7 @@ func (p *GlobalEnvironment) allocateUnit(column *ColumnBinding, name string, dat []RegisterSource{source}, }) // Map column to register - p.columns[colId] = regId + p.columns[path.String()] = regId } // Apply the given register allocator to each module of this environment in turn. diff --git a/pkg/corset/expression.go b/pkg/corset/expression.go index 77771c01..c481ecd6 100644 --- a/pkg/corset/expression.go +++ b/pkg/corset/expression.go @@ -7,6 +7,7 @@ import ( "github.com/consensys/go-corset/pkg/sexp" tr "github.com/consensys/go-corset/pkg/trace" + "github.com/consensys/go-corset/pkg/util" ) // Expr represents an arbitrary expression over the columns of a given context @@ -82,17 +83,11 @@ func (e *Add) Dependencies() []Symbol { // ArrayAccess represents the a given value taken to a power. type ArrayAccess struct { - name string + path util.Path arg Expr binding Binding } -// IsQualified determines whether this symbol is qualfied or not (i.e. has an -// explicitly module specifier). -func (e *ArrayAccess) IsQualified() bool { - return false -} - // IsFunction indicates whether or not this symbol refers to a function (which // of course it always does). 
func (e *ArrayAccess) IsFunction() bool { @@ -116,15 +111,9 @@ func (e *ArrayAccess) Multiplicity() uint { return determineMultiplicity([]Expr{e.arg}) } -// Module returns the module used to qualify this array access. At this time, -// however, array accesses are always unqualified. -func (e *ArrayAccess) Module() string { - panic("unqualified array access") -} - -// Name returns the (unqualified) name of this symbol -func (e *ArrayAccess) Name() string { - return e.name +// Path returns the given path of this symbol. +func (e *ArrayAccess) Path() *util.Path { + return &e.path } // Binding gets binding associated with this interface. This will panic if this @@ -148,7 +137,7 @@ func (e *ArrayAccess) Context() Context { // so it can be printed. func (e *ArrayAccess) Lisp() sexp.SExp { return sexp.NewArray([]sexp.SExp{ - sexp.NewSymbol(e.name), + sexp.NewSymbol(e.path.String()), e.arg.Lisp(), }) } @@ -361,7 +350,8 @@ func (e *For) Dependencies() []Symbol { var rest []Symbol // for _, s := range e.Body.Dependencies() { - if s.IsQualified() || s.Name() != e.Binding.name { + p := s.Path() + if p.IsAbsolute() || p.Depth() != 1 || p.Head() != e.Binding.name { rest = append(rest, s) } } @@ -682,7 +672,7 @@ func (e *Reduce) Context() Context { func (e *Reduce) Lisp() sexp.SExp { return sexp.NewList([]sexp.SExp{ sexp.NewSymbol("reduce"), - sexp.NewSymbol(e.fn.name), + sexp.NewSymbol(e.fn.Path().String()), e.arg.Lisp()}) } @@ -799,8 +789,7 @@ func (e *Shift) Dependencies() []Symbol { // VariableAccess represents reading the value of a given local variable (such // as a function parameter). type VariableAccess struct { - module *string - name string + path util.Path fn bool binding Binding } @@ -815,10 +804,9 @@ func (e *VariableAccess) AsConstant() *big.Int { return nil } -// IsQualified determines whether this symbol is qualfied or not (i.e. has an -// explicitly module specifier). 
-func (e *VariableAccess) IsQualified() bool { - return e.module != nil +// Path returns the given path of this symbol. +func (e *VariableAccess) Path() *util.Path { + return &e.path } // IsFunction determines whether this symbol refers to a function (which, of @@ -850,17 +838,6 @@ func (e *VariableAccess) Resolve(binding Binding) bool { return true } -// Module returns the optional module qualification. This will panic if this -// invocation is unqualified. -func (e *VariableAccess) Module() string { - return *e.module -} - -// Name returns the (unqualified) name of this symbol -func (e *VariableAccess) Name() string { - return e.name -} - // Binding gets binding associated with this interface. This will panic if this // symbol is not yet resolved. func (e *VariableAccess) Binding() Binding { @@ -893,14 +870,7 @@ func (e *VariableAccess) Context() Context { // Lisp converts this schema element into a simple S-Expression, for example // so it can be printed.a func (e *VariableAccess) Lisp() sexp.SExp { - var name string - if e.module != nil { - name = fmt.Sprintf("%s.%s", *e.module, e.name) - } else { - name = e.name - } - // - return sexp.NewSymbol(name) + return sexp.NewSymbol(e.path.String()) } // Dependencies needed to signal declaration. 
@@ -937,7 +907,7 @@ func Substitute(expr Expr, mapping map[uint]Expr, srcmap *sexp.SourceMaps[Node]) switch e := expr.(type) { case *ArrayAccess: arg := Substitute(e.arg, mapping, srcmap) - nexpr = &ArrayAccess{e.name, arg, e.binding} + nexpr = &ArrayAccess{e.path, arg, e.binding} case *Add: args := SubstituteAll(e.Args, mapping, srcmap) nexpr = &Add{args} @@ -1034,7 +1004,7 @@ func ShallowCopy(expr Expr) Expr { // switch e := expr.(type) { case *ArrayAccess: - return &ArrayAccess{e.name, e.arg, e.binding} + return &ArrayAccess{e.path, e.arg, e.binding} case *Add: return &Add{e.Args} case *Constant: @@ -1062,7 +1032,7 @@ func ShallowCopy(expr Expr) Expr { case *Shift: return &Shift{e.Arg, e.Shift} case *VariableAccess: - return &VariableAccess{e.module, e.name, e.fn, e.binding} + return &VariableAccess{e.path, e.fn, e.binding} default: panic(fmt.Sprintf("unknown expression (%s)", reflect.TypeOf(expr))) } diff --git a/pkg/corset/intrinsics.go b/pkg/corset/intrinsics.go index 9790b07b..569b1522 100644 --- a/pkg/corset/intrinsics.go +++ b/pkg/corset/intrinsics.go @@ -5,6 +5,7 @@ import ( "math" "github.com/consensys/go-corset/pkg/sexp" + "github.com/consensys/go-corset/pkg/util" ) // IntrinsicDefinition is a SymbolDefinition for an intrinsic (i.e. built-in) @@ -29,6 +30,13 @@ func (p *IntrinsicDefinition) Name() string { return p.name } +// Path returns the qualified name (i.e. absolute path) of this symbol. For +// example, "m1.X" for a column X defined in module m1. +func (p *IntrinsicDefinition) Path() *util.Path { + path := util.NewAbsolutePath(p.name) + return &path +} + // IsPure checks whether this pure (which intrinsics always are). 
func (p *IntrinsicDefinition) IsPure() bool { return true @@ -121,8 +129,9 @@ func intrinsicNaryBody(arity uint) []Expr { // for i := uint(0); i != arity; i++ { name := fmt.Sprintf("x%d", i) + path := util.NewAbsolutePath(name) binding := &LocalVariableBinding{name, nil, i} - args[i] = &VariableAccess{nil, name, true, binding} + args[i] = &VariableAccess{path, true, binding} } // return args diff --git a/pkg/corset/parser.go b/pkg/corset/parser.go index bb4ad06d..5d1d9d2b 100644 --- a/pkg/corset/parser.go +++ b/pkg/corset/parser.go @@ -11,6 +11,7 @@ import ( "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" "github.com/consensys/go-corset/pkg/sexp" + "github.com/consensys/go-corset/pkg/util" ) // =================================================================== @@ -86,6 +87,7 @@ func ParseSourceFile(srcfile *sexp.SourceFile) (Circuit, *sexp.SourceMap[Node], var ( circuit Circuit errors []SyntaxError + path util.Path = util.NewAbsolutePath() ) // Parse bytes into an S-Expression terms, srcmap, err := srcfile.ParseAll() @@ -97,7 +99,7 @@ func ParseSourceFile(srcfile *sexp.SourceFile) (Circuit, *sexp.SourceMap[Node], p := NewParser(srcfile, srcmap) // Parse whatever is declared at the beginning of the file before the first // module declaration. These declarations form part of the "prelude". - if circuit.Declarations, terms, errors = p.parseModuleContents("", terms); len(errors) > 0 { + if circuit.Declarations, terms, errors = p.parseModuleContents(path, terms); len(errors) > 0 { return circuit, nil, errors } // Continue parsing string until nothing remains. 
@@ -111,7 +113,8 @@ func ParseSourceFile(srcfile *sexp.SourceFile) (Circuit, *sexp.SourceMap[Node], return circuit, nil, errors } // Parse module contents - if decls, terms, errors = p.parseModuleContents(name, terms[1:]); len(errors) > 0 { + path = util.NewAbsolutePath(name) + if decls, terms, errors = p.parseModuleContents(path, terms[1:]); len(errors) > 0 { return circuit, nil, errors } else if len(decls) != 0 { circuit.Modules = append(circuit.Modules, Module{name, decls}) @@ -181,7 +184,7 @@ func (p *Parser) mapSourceNode(from sexp.SExp, to Node) { } // Extract all declarations associated with a given module and package them up. -func (p *Parser) parseModuleContents(module string, terms []sexp.SExp) ([]Declaration, []sexp.SExp, []SyntaxError) { +func (p *Parser) parseModuleContents(path util.Path, terms []sexp.SExp) ([]Declaration, []sexp.SExp, []SyntaxError) { var errors []SyntaxError // decls := make([]Declaration, 0) @@ -194,7 +197,7 @@ func (p *Parser) parseModuleContents(module string, terms []sexp.SExp) ([]Declar errors = append(errors, *err) } else if e.MatchSymbols(2, "module") { return decls, terms[i:], errors - } else if decl, errs := p.parseDeclaration(module, e); len(errs) > 0 { + } else if decl, errs := p.parseDeclaration(path, e); len(errs) > 0 { errors = append(errors, errs...) } else { // Continue accumulating declarations for this module. 
@@ -229,7 +232,7 @@ func (p *Parser) parseModuleStart(s sexp.SExp) (string, []SyntaxError) { return name, nil } -func (p *Parser) parseDeclaration(module string, s *sexp.List) (Declaration, []SyntaxError) { +func (p *Parser) parseDeclaration(module util.Path, s *sexp.List) (Declaration, []SyntaxError) { var ( decl Declaration errors []SyntaxError @@ -240,15 +243,15 @@ func (p *Parser) parseDeclaration(module string, s *sexp.List) (Declaration, []S } else if s.MatchSymbols(1, "defcolumns") { decl, errors = p.parseDefColumns(module, s) } else if s.Len() > 1 && s.MatchSymbols(1, "defconst") { - decl, errors = p.parseDefConst(s.Elements) + decl, errors = p.parseDefConst(module, s.Elements) } else if s.Len() == 4 && s.MatchSymbols(2, "defconstraint") { - decl, errors = p.parseDefConstraint(s.Elements) + decl, errors = p.parseDefConstraint(module, s.Elements) } else if s.MatchSymbols(1, "defunalias") { decl, errors = p.parseDefAlias(true, s.Elements) } else if s.Len() == 3 && s.MatchSymbols(1, "defpurefun") { - decl, errors = p.parseDefFun(true, s.Elements) + decl, errors = p.parseDefFun(module, true, s.Elements) } else if s.Len() == 3 && s.MatchSymbols(1, "defun") { - decl, errors = p.parseDefFun(false, s.Elements) + decl, errors = p.parseDefFun(module, false, s.Elements) } else if s.Len() == 3 && s.MatchSymbols(1, "definrange") { decl, errors = p.parseDefInRange(s.Elements) } else if s.Len() == 3 && s.MatchSymbols(1, "definterleaved") { @@ -293,7 +296,9 @@ func (p *Parser) parseDefAlias(functions bool, elements []sexp.SExp) (Declaratio errors = append(errors, *p.translator.SyntaxError(elements[i+1], "invalid alias definition")) } else { alias := &DefAlias{elements[i].AsSymbol().Value} - name := NewName[Binding](elements[i+1].AsSymbol().Value, functions) + path := util.NewRelativePath(elements[i+1].AsSymbol().Value) + name := NewName[Binding](path, functions) + // p.mapSourceNode(elements[i], alias) p.mapSourceNode(elements[i+1], name) // @@ -306,7 +311,7 @@ func (p 
*Parser) parseDefAlias(functions bool, elements []sexp.SExp) (Declaratio } // Parse a column declaration -func (p *Parser) parseDefColumns(module string, l *sexp.List) (Declaration, []SyntaxError) { +func (p *Parser) parseDefColumns(module util.Path, l *sexp.List) (Declaration, []SyntaxError) { columns := make([]*DefColumn, l.Len()-1) // Sanity check declaration if len(l.Elements) == 1 { @@ -317,7 +322,7 @@ func (p *Parser) parseDefColumns(module string, l *sexp.List) (Declaration, []Sy var errors []SyntaxError // Process column declarations one by one. for i := 1; i < len(l.Elements); i++ { - decl, err := p.parseColumnDeclaration(module, "", false, l.Elements[i]) + decl, err := p.parseColumnDeclaration(module, module, false, l.Elements[i]) // Extract column name if err != nil { errors = append(errors, *err) @@ -333,12 +338,12 @@ func (p *Parser) parseDefColumns(module string, l *sexp.List) (Declaration, []Sy return &DefColumns{columns}, nil } -func (p *Parser) parseColumnDeclaration(module string, perspective string, computed bool, +func (p *Parser) parseColumnDeclaration(context util.Path, path util.Path, computed bool, e sexp.SExp) (*DefColumn, *SyntaxError) { // var ( error *SyntaxError - binding ColumnBinding = ColumnBinding{module, perspective, "", computed, false, 0, nil} + binding ColumnBinding = ColumnBinding{context, path, computed, false, 0, nil} ) // Set defaults for input columns if !computed { @@ -355,16 +360,16 @@ func (p *Parser) parseColumnDeclaration(module string, perspective string, compu return nil, p.translator.SyntaxError(l.Elements[0], "invalid column name") } // Column name is always first - binding.name = l.Elements[0].String(false) + binding.path = *path.Extend(l.Elements[0].String(false)) // Parse type (if applicable) if binding.dataType, binding.mustProve, error = p.parseColumnDeclarationAttributes(l.Elements[1:]); error != nil { return nil, error } } else { - binding.name = e.String(false) + binding.path = *path.Extend(e.String(false)) 
} // - def := &DefColumn{binding.name, binding} + def := &DefColumn{binding} // Update source mapping p.mapSourceNode(e, def) // @@ -449,7 +454,7 @@ func (p *Parser) parseArrayDimension(s sexp.SExp) (uint, uint, *SyntaxError) { } // Parse a constant declaration -func (p *Parser) parseDefConst(elements []sexp.SExp) (Declaration, []SyntaxError) { +func (p *Parser) parseDefConst(module util.Path, elements []sexp.SExp) (Declaration, []SyntaxError) { var ( errors []SyntaxError constants []*DefConstUnit @@ -465,7 +470,8 @@ func (p *Parser) parseDefConst(elements []sexp.SExp) (Declaration, []SyntaxError errors = append(errors, *p.translator.SyntaxError(elements[i], "invalid constant name")) } else { // Attempt to parse definition - constant, errs := p.parseDefConstUnit(elements[i].AsSymbol().Value, elements[i+1]) + path := module.Extend(elements[i].AsSymbol().Value) + constant, errs := p.parseDefConstUnit(*path, elements[i+1]) errors = append(errors, errs...) constants = append(constants, constant) } @@ -474,14 +480,14 @@ func (p *Parser) parseDefConst(elements []sexp.SExp) (Declaration, []SyntaxError return &DefConst{constants}, errors } -func (p *Parser) parseDefConstUnit(name string, value sexp.SExp) (*DefConstUnit, []SyntaxError) { +func (p *Parser) parseDefConstUnit(name util.Path, value sexp.SExp) (*DefConstUnit, []SyntaxError) { expr, errors := p.translator.Translate(value) // Check for errors if len(errors) != 0 { return nil, errors } // Looks good - def := &DefConstUnit{name, NewConstantBinding(expr)} + def := &DefConstUnit{NewConstantBinding(name, expr)} // Map to source node p.mapSourceNode(value, def) // Done @@ -489,7 +495,7 @@ func (p *Parser) parseDefConstUnit(name string, value sexp.SExp) (*DefConstUnit, } // Parse a vanishing declaration -func (p *Parser) parseDefConstraint(elements []sexp.SExp) (Declaration, []SyntaxError) { +func (p *Parser) parseDefConstraint(module util.Path, elements []sexp.SExp) (Declaration, []SyntaxError) { var errors 
[]SyntaxError // Initial sanity checks if !isIdentifier(elements[1]) { @@ -497,7 +503,7 @@ func (p *Parser) parseDefConstraint(elements []sexp.SExp) (Declaration, []Syntax } // Vanishing constraints do not have global scope, hence qualified column // accesses are not permitted. - domain, guard, perspective, errs := p.parseConstraintAttributes(elements[2]) + domain, guard, perspective, errs := p.parseConstraintAttributes(module, elements[2]) errors = append(errors, errs...) // Translate expression expr, errs := p.translator.Translate(elements[3]) @@ -511,7 +517,7 @@ func (p *Parser) parseDefConstraint(elements []sexp.SExp) (Declaration, []Syntax } // Parse a interleaved declaration -func (p *Parser) parseDefInterleaved(module string, elements []sexp.SExp) (Declaration, []SyntaxError) { +func (p *Parser) parseDefInterleaved(module util.Path, elements []sexp.SExp) (Declaration, []SyntaxError) { var ( errors []SyntaxError sources []Symbol @@ -534,7 +540,11 @@ func (p *Parser) parseDefInterleaved(module string, elements []sexp.SExp) (Decla errors = append(errors, *p.translator.SyntaxError(ith, "malformed source column")) } else { // Extract column name - sources[i] = NewColumnName(ith.AsSymbol().Value) + name := ith.AsSymbol().Value + // Construct absolute path of column + path := module.Extend(name) + // + sources[i] = NewColumnName(*path) p.mapSourceNode(ith, sources[i]) } } @@ -544,9 +554,9 @@ func (p *Parser) parseDefInterleaved(module string, elements []sexp.SExp) (Decla return nil, errors } // - name := elements[1].AsSymbol().Value - binding := NewComputedColumnBinding(module, name) - target := &DefColumn{name, *binding} + path := module.Extend(elements[1].AsSymbol().Value) + binding := NewComputedColumnBinding(module, *path) + target := &DefColumn{*binding} // Updating mapping for target definition p.mapSourceNode(elements[1], target) // Done @@ -604,7 +614,7 @@ func (p *Parser) parseDefLookup(elements []sexp.SExp) (Declaration, []SyntaxErro } // Parse a 
permutation declaration -func (p *Parser) parseDefPermutation(module string, elements []sexp.SExp) (Declaration, []SyntaxError) { +func (p *Parser) parseDefPermutation(module util.Path, elements []sexp.SExp) (Declaration, []SyntaxError) { var ( errors []SyntaxError sources []Symbol @@ -639,11 +649,11 @@ func (p *Parser) parseDefPermutation(module string, elements []sexp.SExp) (Decla for i := 0; i < min(len(sources), len(targets)); i++ { var err *SyntaxError // Parse target column - if targets[i], err = p.parseColumnDeclaration(module, "", true, sexpTargets.Get(i)); err != nil { + if targets[i], err = p.parseColumnDeclaration(module, module, true, sexpTargets.Get(i)); err != nil { errors = append(errors, *err) } // Parse source column - if sources[i], signs[i], err = p.parsePermutedColumnDeclaration(i == 0, sexpSources.Get(i)); err != nil { + if sources[i], signs[i], err = p.parsePermutedColumnDeclaration(module, i == 0, sexpSources.Get(i)); err != nil { errors = append(errors, *err) } } @@ -656,10 +666,12 @@ func (p *Parser) parseDefPermutation(module string, elements []sexp.SExp) (Decla return &DefPermutation{targets, sources, signs}, nil } -func (p *Parser) parsePermutedColumnDeclaration(signRequired bool, e sexp.SExp) (*ColumnName, bool, *SyntaxError) { +func (p *Parser) parsePermutedColumnDeclaration(module util.Path, signRequired bool, + e sexp.SExp) (*ColumnName, bool, *SyntaxError) { + // var ( err *SyntaxError - name *ColumnName + name string sign bool ) // Check whether extended declaration or not. 
@@ -677,16 +689,19 @@ func (p *Parser) parsePermutedColumnDeclaration(signRequired bool, e sexp.SExp) return nil, false, err } // Parse column name - name = NewColumnName(l.Get(1).AsSymbol().Value) + name = l.Get(1).AsSymbol().Value } else if signRequired { return nil, false, p.translator.SyntaxError(e, "missing sort direction") } else { - name = NewColumnName(e.String(false)) + name = e.String(false) } + // + path := module.Extend(name) + colname := NewColumnName(*path) // Update source mapping - p.mapSourceNode(e, name) + p.mapSourceNode(e, colname) // - return name, sign, nil + return colname, sign, nil } func (p *Parser) parsePermutedColumnSign(sign *sexp.Symbol) (bool, *SyntaxError) { @@ -701,13 +716,12 @@ func (p *Parser) parsePermutedColumnSign(sign *sexp.Symbol) (bool, *SyntaxError) } // Parse a perspective declaration -func (p *Parser) parseDefPerspective(module string, elements []sexp.SExp) (Declaration, []SyntaxError) { +func (p *Parser) parseDefPerspective(module util.Path, elements []sexp.SExp) (Declaration, []SyntaxError) { var ( - errors []SyntaxError - sexp_columns *sexp.List = elements[3].AsList() - columns []*DefColumn - perspective *PerspectiveName - perspective_name string + errors []SyntaxError + sexp_columns *sexp.List = elements[3].AsList() + columns []*DefColumn + perspective *PerspectiveName ) // Check for columns if sexp_columns == nil { @@ -719,17 +733,15 @@ func (p *Parser) parseDefPerspective(module string, elements []sexp.SExp) (Decla // Parse perspective selector binding := NewPerspectiveBinding(selector) // Parse perspective name - if perspective, errs = parseSymbolName(p, elements[1], false, binding); len(errs) == 0 { - perspective_name = perspective.name - } else { + if perspective, errs = parseSymbolName(p, elements[1], module, false, binding); len(errs) != 0 { errors = append(errors, errs...) } // Process column declarations one by one. 
- if sexp_columns != nil { + if sexp_columns != nil && perspective != nil { columns = make([]*DefColumn, sexp_columns.Len()) for i := 0; i < len(sexp_columns.Elements); i++ { - decl, err := p.parseColumnDeclaration(module, perspective_name, false, sexp_columns.Elements[i]) + decl, err := p.parseColumnDeclaration(module, *perspective.Path(), false, sexp_columns.Elements[i]) // Extract column name if err != nil { errors = append(errors, *err) @@ -767,7 +779,7 @@ func (p *Parser) parseDefProperty(elements []sexp.SExp) (Declaration, []SyntaxEr } // Parse a permutation declaration -func (p *Parser) parseDefFun(pure bool, elements []sexp.SExp) (Declaration, []SyntaxError) { +func (p *Parser) parseDefFun(module util.Path, pure bool, elements []sexp.SExp) (Declaration, []SyntaxError) { var ( name *sexp.Symbol ret Type @@ -795,8 +807,9 @@ func (p *Parser) parseDefFun(pure bool, elements []sexp.SExp) (Declaration, []Sy paramTypes[i] = p.Binding.datatype } // Construct binding + path := module.Extend(name.Value) binding := NewDefunBinding(pure, paramTypes, ret, body) - fn_name := NewFunctionName(name.Value, &binding) + fn_name := NewFunctionName(*path, &binding) // Update source mapping p.mapSourceNode(name, fn_name) // @@ -917,7 +930,7 @@ func (p *Parser) parseDefInRange(elements []sexp.SExp) (Declaration, []SyntaxErr return &DefInRange{Expr: expr, Bound: bound}, nil } -func (p *Parser) parseConstraintAttributes(attributes sexp.SExp) (domain *int, guard Expr, +func (p *Parser) parseConstraintAttributes(module util.Path, attributes sexp.SExp) (domain *int, guard Expr, perspective *PerspectiveName, err []SyntaxError) { // var errors []SyntaxError @@ -945,8 +958,7 @@ func (p *Parser) parseConstraintAttributes(attributes sexp.SExp) (domain *int, g guard, errs = p.translator.Translate(attrs.Get(i)) case ":perspective": i++ - //binding := NewPerspectiveBinding() - perspective, errs = parseSymbolName[*PerspectiveBinding](p, attrs.Get(i), false, nil) + perspective, errs = 
parseSymbolName[*PerspectiveBinding](p, attrs.Get(i), module, false, nil) default: errs = p.translator.SyntaxErrors(ith, "unknown attribute") } @@ -965,12 +977,15 @@ func (p *Parser) parseConstraintAttributes(attributes sexp.SExp) (domain *int, g } // Parse a symbol name, which will include a binding. -func parseSymbolName[T Binding](p *Parser, symbol sexp.SExp, function bool, binding T) (*Name[T], []SyntaxError) { +func parseSymbolName[T Binding](p *Parser, symbol sexp.SExp, module util.Path, function bool, + binding T) (*Name[T], []SyntaxError) { + // if !isEitherOrIdentifier(symbol, function) { return nil, p.translator.SyntaxErrors(symbol, "expected identifier") } // Extract - name := &Name[T]{symbol.AsSymbol().Value, function, binding, false} + path := module.Extend(symbol.AsSymbol().Value) + name := &Name[T]{*path, function, binding, false} // Update source mapping p.mapSourceNode(symbol, name) // Construct @@ -1168,7 +1183,8 @@ func reduceParserRule(p *Parser) sexp.ListRule[Expr] { return nil, errors } // - varaccess := &VariableAccess{nil, name.Value, true, nil} + path := util.NewRelativePath(name.Value) + varaccess := &VariableAccess{path, true, nil} p.mapSourceNode(name, varaccess) // Done return &Reduce{varaccess, nil, body}, nil @@ -1209,14 +1225,12 @@ func varAccessParserRule(col string) (Expr, bool, error) { } // Handle qualified accesses (where permitted) // Attempt to split column name into module / column pair. 
- split := strings.Split(col, ".") - if len(split) == 2 { - return &VariableAccess{&split[0], split[1], false, nil}, true, nil - } else if len(split) > 2 { - return nil, true, errors.New("malformed column access") - } else { - return &VariableAccess{nil, col, false, nil}, true, nil + path, err := parseQualifiableName(col) + if err != nil { + return nil, true, err } + // + return &VariableAccess{path, false, nil}, true, nil } func arrayAccessParserRule(name string, args []Expr) (Expr, error) { @@ -1224,7 +1238,9 @@ func arrayAccessParserRule(name string, args []Expr) (Expr, error) { return nil, errors.New("malformed array access") } // - return &ArrayAccess{name, args[0], nil}, nil + path := util.NewRelativePath(name) + // + return &ArrayAccess{path, args[0], nil}, nil } func addParserRule(_ string, args []Expr) (Expr, error) { @@ -1266,15 +1282,12 @@ func invokeParserRule(p *Parser) sexp.ListRule[Expr] { errors = append(errors, *p.translator.SyntaxError(list.Get(0), "invalid function name")) } // Handle qualified accesses (where permitted) - // Attempt to split column name into module / column pair. - split := strings.Split(name.Value, ".") - if len(split) == 2 { - // - varaccess = &VariableAccess{&split[0], split[1], true, nil} - } else if len(split) > 2 { + path, err := parseQualifiableName(name.Value) + // + if err != nil { return nil, p.translator.SyntaxErrors(list.Get(0), "invalid function name") } else { - varaccess = &VariableAccess{nil, split[0], true, nil} + varaccess = &VariableAccess{path, true, nil} } // Parse arguments args := make([]Expr, list.Len()-1) @@ -1318,6 +1331,38 @@ func normParserRule(_ string, args []Expr) (Expr, error) { return &Normalise{Arg: args[0]}, nil } +// Parse a name which can be (optionally) adorned with either a module or +// perspective qualifier, or both. 
+func parseQualifiableName(qualName string) (path util.Path, err error) { + // Look for module qualification + split := strings.Split(qualName, ".") + switch len(split) { + case 1: + return parsePerspectifiableName(qualName) + case 2: + module := split[0] + path, err := parsePerspectifiableName(split[1]) + + return *path.PushRoot(module), err + default: + return path, errors.New("malformed qualified name") + } +} + +// Parse a name which can be (optionally) adorned with a perspective qualifier +func parsePerspectifiableName(qualName string) (path util.Path, err error) { + // Look for module qualification + split := strings.Split(qualName, "/") + switch len(split) { + case 1: + return util.NewRelativePath(split[0]), nil + case 2: + return util.NewRelativePath(split[0], split[1]), nil + default: + return path, errors.New("malformed qualified name") + } +} + + // Attempt to parse an S-Expression as an identifier, return nil if this fails. // The function flag switches this to identifiers suitable for functions and // invocations. diff --git a/pkg/corset/preprocessor.go b/pkg/corset/preprocessor.go index 4d8ac2f9..e493c1be 100644 --- a/pkg/corset/preprocessor.go +++ b/pkg/corset/preprocessor.go @@ -31,10 +31,10 @@ type preprocessor struct { // preprocess all assignment or constraint declarations in the circuit. func (p *preprocessor) preprocessDeclarations(circuit *Circuit) []SyntaxError { - errors := p.preprocessDeclarationsInModule("", circuit.Declarations) + errors := p.preprocessDeclarationsInModule(circuit.Declarations) // preprocess each module for _, m := range circuit.Modules { - errs := p.preprocessDeclarationsInModule(m.Name, m.Declarations) + errs := p.preprocessDeclarationsInModule(m.Declarations) errors = append(errors, errs...) } // Done @@ -43,11 +43,11 @@ func (p *preprocessor) preprocessDeclarations(circuit *Circuit) []SyntaxError { // preprocess all assignment or constraint declarations in a given module within // the circuit. 
-func (p *preprocessor) preprocessDeclarationsInModule(module string, decls []Declaration) []SyntaxError { +func (p *preprocessor) preprocessDeclarationsInModule(decls []Declaration) []SyntaxError { var errors []SyntaxError // for _, d := range decls { - errs := p.preprocessDeclaration(d, module) + errs := p.preprocessDeclaration(d) errors = append(errors, errs...) } // Done @@ -56,7 +56,7 @@ func (p *preprocessor) preprocessDeclarationsInModule(module string, decls []Dec // preprocess an assignment or constraint declarartion which occurs within a // given module. -func (p *preprocessor) preprocessDeclaration(decl Declaration, module string) []SyntaxError { +func (p *preprocessor) preprocessDeclaration(decl Declaration) []SyntaxError { var errors []SyntaxError // switch d := decl.(type) { @@ -67,21 +67,21 @@ func (p *preprocessor) preprocessDeclaration(decl Declaration, module string) [] case *DefConst: // ignore case *DefConstraint: - errors = p.preprocessDefConstraint(d, module) + errors = p.preprocessDefConstraint(d) case *DefFun: // ignore case *DefInRange: - errors = p.preprocessDefInRange(d, module) + errors = p.preprocessDefInRange(d) case *DefInterleaved: // ignore case *DefLookup: - errors = p.preprocessDefLookup(d, module) + errors = p.preprocessDefLookup(d) case *DefPermutation: // ignore case *DefPerspective: - errors = p.preprocessDefPerspective(d, module) + errors = p.preprocessDefPerspective(d) case *DefProperty: - errors = p.preprocessDefProperty(d, module) + errors = p.preprocessDefProperty(d) default: // Error handling panic("unknown declaration") @@ -91,15 +91,15 @@ func (p *preprocessor) preprocessDeclaration(decl Declaration, module string) [] } // preprocess a "defconstraint" declaration. 
-func (p *preprocessor) preprocessDefConstraint(decl *DefConstraint, module string) []SyntaxError { +func (p *preprocessor) preprocessDefConstraint(decl *DefConstraint) []SyntaxError { var ( constraint_errors []SyntaxError guard_errors []SyntaxError ) // preprocess constraint body - decl.Constraint, constraint_errors = p.preprocessExpressionInModule(decl.Constraint, module) + decl.Constraint, constraint_errors = p.preprocessExpressionInModule(decl.Constraint) // preprocess (optional) guard - decl.Guard, guard_errors = p.preprocessOptionalExpressionInModule(decl.Guard, module) + decl.Guard, guard_errors = p.preprocessOptionalExpressionInModule(decl.Guard) // Combine errors return append(constraint_errors, guard_errors...) } @@ -107,41 +107,41 @@ func (p *preprocessor) preprocessDefConstraint(decl *DefConstraint, module strin // preprocess a "deflookup" declaration. // //nolint:staticcheck -func (p *preprocessor) preprocessDefLookup(decl *DefLookup, module string) []SyntaxError { +func (p *preprocessor) preprocessDefLookup(decl *DefLookup) []SyntaxError { var ( source_errs []SyntaxError target_errs []SyntaxError ) // preprocess source expressions - decl.Sources, source_errs = p.preprocessExpressionsInModule(decl.Sources, module) - decl.Targets, target_errs = p.preprocessExpressionsInModule(decl.Targets, module) + decl.Sources, source_errs = p.preprocessExpressionsInModule(decl.Sources) + decl.Targets, target_errs = p.preprocessExpressionsInModule(decl.Targets) // Combine errors return append(source_errs, target_errs...) } // preprocess a "definrange" declaration. 
-func (p *preprocessor) preprocessDefInRange(decl *DefInRange, module string) []SyntaxError { +func (p *preprocessor) preprocessDefInRange(decl *DefInRange) []SyntaxError { var errors []SyntaxError // preprocess constraint body - decl.Expr, errors = p.preprocessExpressionInModule(decl.Expr, module) + decl.Expr, errors = p.preprocessExpressionInModule(decl.Expr) // Done return errors } // preprocess a "defperspective" declaration. -func (p *preprocessor) preprocessDefPerspective(decl *DefPerspective, module string) []SyntaxError { +func (p *preprocessor) preprocessDefPerspective(decl *DefPerspective) []SyntaxError { var errors []SyntaxError // preprocess selector expression - decl.Selector, errors = p.preprocessExpressionInModule(decl.Selector, module) + decl.Selector, errors = p.preprocessExpressionInModule(decl.Selector) // Combine errors return errors } // preprocess a "defproperty" declaration. -func (p *preprocessor) preprocessDefProperty(decl *DefProperty, module string) []SyntaxError { +func (p *preprocessor) preprocessDefProperty(decl *DefProperty) []SyntaxError { var errors []SyntaxError // preprocess constraint body - decl.Assertion, errors = p.preprocessExpressionInModule(decl.Assertion, module) + decl.Assertion, errors = p.preprocessExpressionInModule(decl.Assertion) // Done return errors } @@ -149,10 +149,10 @@ func (p *preprocessor) preprocessDefProperty(decl *DefProperty, module string) [ // preprocess an optional expression in a given context. That is an expression // which maybe nil (i.e. doesn't exist). In such case, nil is returned (i.e. // without any errors). 
-func (p *preprocessor) preprocessOptionalExpressionInModule(expr Expr, module string) (Expr, []SyntaxError) { +func (p *preprocessor) preprocessOptionalExpressionInModule(expr Expr) (Expr, []SyntaxError) { // if expr != nil { - return p.preprocessExpressionInModule(expr, module) + return p.preprocessExpressionInModule(expr) } return nil, nil @@ -161,7 +161,7 @@ func (p *preprocessor) preprocessOptionalExpressionInModule(expr Expr, module st // preprocess a sequence of zero or more expressions enclosed in a given module. // All expressions are expected to be non-voidable (see below for more on // voidability). -func (p *preprocessor) preprocessExpressionsInModule(exprs []Expr, module string) ([]Expr, []SyntaxError) { +func (p *preprocessor) preprocessExpressionsInModule(exprs []Expr) ([]Expr, []SyntaxError) { // errors := []SyntaxError{} hirExprs := make([]Expr, len(exprs)) @@ -169,7 +169,7 @@ func (p *preprocessor) preprocessExpressionsInModule(exprs []Expr, module string for i, e := range exprs { if e != nil { var errs []SyntaxError - hirExprs[i], errs = p.preprocessExpressionInModule(e, module) + hirExprs[i], errs = p.preprocessExpressionInModule(e) errors = append(errors, errs...) // Check for non-voidability if hirExprs[i] == nil { @@ -186,7 +186,7 @@ func (p *preprocessor) preprocessExpressionsInModule(exprs []Expr, module string // expressions. That is, essentially, to account for debug constraints which // only exist in debug mode. Hence, when debug mode is not enabled, then a // debug constraint is "void". 
-func (p *preprocessor) preprocessVoidableExpressionsInModule(exprs []Expr, module string) ([]Expr, []SyntaxError) { +func (p *preprocessor) preprocessVoidableExpressionsInModule(exprs []Expr) ([]Expr, []SyntaxError) { // errors := []SyntaxError{} hirExprs := make([]Expr, len(exprs)) @@ -195,7 +195,7 @@ func (p *preprocessor) preprocessVoidableExpressionsInModule(exprs []Expr, modul for i, e := range exprs { if e != nil { var errs []SyntaxError - hirExprs[i], errs = p.preprocessExpressionInModule(e, module) + hirExprs[i], errs = p.preprocessExpressionInModule(e) errors = append(errors, errs...) // Update dirty flag if hirExprs[i] == nil { @@ -228,7 +228,7 @@ func (p *preprocessor) preprocessVoidableExpressionsInModule(exprs []Expr, modul // preprocess an expression situated in a given context. The context is // necessary to resolve unqualified names (e.g. for column access, function // invocations, etc). -func (p *preprocessor) preprocessExpressionInModule(expr Expr, module string) (Expr, []SyntaxError) { +func (p *preprocessor) preprocessExpressionInModule(expr Expr) (Expr, []SyntaxError) { var ( nexpr Expr errors []SyntaxError @@ -236,48 +236,48 @@ func (p *preprocessor) preprocessExpressionInModule(expr Expr, module string) (E // switch e := expr.(type) { case *ArrayAccess: - arg, errs := p.preprocessExpressionInModule(e.arg, module) - nexpr, errors = &ArrayAccess{e.name, arg, e.binding}, errs + arg, errs := p.preprocessExpressionInModule(e.arg) + nexpr, errors = &ArrayAccess{e.path, arg, e.binding}, errs case *Add: - args, errs := p.preprocessExpressionsInModule(e.Args, module) + args, errs := p.preprocessExpressionsInModule(e.Args) nexpr, errors = &Add{args}, errs case *Constant: return e, nil case *Debug: if p.debug { - return p.preprocessExpressionInModule(e.Arg, module) + return p.preprocessExpressionInModule(e.Arg) } // When debug is not enabled, return "void". 
return nil, nil case *Exp: - arg, errs1 := p.preprocessExpressionInModule(e.Arg, module) - pow, errs2 := p.preprocessExpressionInModule(e.Pow, module) + arg, errs1 := p.preprocessExpressionInModule(e.Arg) + pow, errs2 := p.preprocessExpressionInModule(e.Pow) // Done nexpr, errors = &Exp{arg, pow}, append(errs1, errs2...) case *For: - return p.preprocessForInModule(e, module) + return p.preprocessForInModule(e) case *If: - args, errs := p.preprocessExpressionsInModule([]Expr{e.Condition, e.TrueBranch, e.FalseBranch}, module) + args, errs := p.preprocessExpressionsInModule([]Expr{e.Condition, e.TrueBranch, e.FalseBranch}) // Construct appropriate if form nexpr, errors = &If{e.kind, args[0], args[1], args[2]}, errs case *Invoke: - return p.preprocessInvokeInModule(e, module) + return p.preprocessInvokeInModule(e) case *List: - args, errs := p.preprocessVoidableExpressionsInModule(e.Args, module) + args, errs := p.preprocessVoidableExpressionsInModule(e.Args) nexpr, errors = &List{args}, errs case *Mul: - args, errs := p.preprocessExpressionsInModule(e.Args, module) + args, errs := p.preprocessExpressionsInModule(e.Args) nexpr, errors = &Mul{args}, errs case *Normalise: - arg, errs := p.preprocessExpressionInModule(e.Arg, module) + arg, errs := p.preprocessExpressionInModule(e.Arg) nexpr, errors = &Normalise{arg}, errs case *Reduce: - return p.preprocessReduceInModule(e, module) + return p.preprocessReduceInModule(e) case *Sub: - args, errs := p.preprocessExpressionsInModule(e.Args, module) + args, errs := p.preprocessExpressionsInModule(e.Args) nexpr, errors = &Sub{args}, errs case *Shift: - arg, errs := p.preprocessExpressionInModule(e.Arg, module) + arg, errs := p.preprocessExpressionInModule(e.Arg) nexpr, errors = &Shift{arg, e.Shift}, errs case *VariableAccess: return e, nil @@ -290,7 +290,7 @@ func (p *preprocessor) preprocessExpressionInModule(expr Expr, module string) (E return nexpr, errors } -func (p *preprocessor) preprocessForInModule(expr *For, module 
string) (Expr, []SyntaxError) { +func (p *preprocessor) preprocessForInModule(expr *For) (Expr, []SyntaxError) { var ( errors []SyntaxError mapping map[uint]Expr = make(map[uint]Expr) @@ -305,7 +305,7 @@ func (p *preprocessor) preprocessForInModule(expr *For, module string) (Expr, [] mapping[expr.Binding.index] = &Constant{*big.NewInt(int64(i + expr.Start))} ith := Substitute(expr.Body, mapping, p.srcmap) // preprocess subsituted expression - args[i], errs = p.preprocessExpressionInModule(ith, module) + args[i], errs = p.preprocessExpressionInModule(ith) errors = append(errors, errs...) } // Error check @@ -316,17 +316,17 @@ func (p *preprocessor) preprocessForInModule(expr *For, module string) (Expr, [] return &List{args}, nil } -func (p *preprocessor) preprocessInvokeInModule(expr *Invoke, module string) (Expr, []SyntaxError) { +func (p *preprocessor) preprocessInvokeInModule(expr *Invoke) (Expr, []SyntaxError) { if expr.signature != nil { body := expr.signature.Apply(expr.Args(), p.srcmap) - return p.preprocessExpressionInModule(body, module) + return p.preprocessExpressionInModule(body) } // return nil, p.srcmap.SyntaxErrors(expr, "unbound function") } -func (p *preprocessor) preprocessReduceInModule(expr *Reduce, module string) (Expr, []SyntaxError) { - body, errors := p.preprocessExpressionInModule(expr.arg, module) +func (p *preprocessor) preprocessReduceInModule(expr *Reduce) (Expr, []SyntaxError) { + body, errors := p.preprocessExpressionInModule(expr.arg) // if list, ok := body.(*List); !ok { return nil, append(errors, *p.srcmap.SyntaxError(expr.arg, "expected list")) diff --git a/pkg/corset/resolver.go b/pkg/corset/resolver.go index e1b7f187..16d84e75 100644 --- a/pkg/corset/resolver.go +++ b/pkg/corset/resolver.go @@ -12,14 +12,16 @@ import ( // a symbol (e.g. a column) is referred to which doesn't exist. Likewise, if // two modules or columns with identical names are declared in the same scope, // etc. 
-func ResolveCircuit(srcmap *sexp.SourceMaps[Node], circuit *Circuit) (*GlobalScope, []SyntaxError) { +func ResolveCircuit(srcmap *sexp.SourceMaps[Node], circuit *Circuit) (*ModuleScope, []SyntaxError) { // Construct top-level scope - scope := NewGlobalScope() - // Register the root module (which should always exist) - scope.DeclareModule("") - // Register other modules + scope := NewModuleScope() + // Define intrinsics + for _, i := range INTRINSICS { + scope.Define(&i) + } + // Register modules for _, m := range circuit.Modules { - scope.DeclareModule(m.Name) + scope.Declare(m.Name, false) } // Construct resolver r := resolver{srcmap} @@ -45,14 +47,14 @@ type resolver struct { } // Initialise all columns from their declaring constructs. -func (r *resolver) initialiseDeclarations(scope *GlobalScope, circuit *Circuit) []SyntaxError { +func (r *resolver) initialiseDeclarations(scope *ModuleScope, circuit *Circuit) []SyntaxError { // Input columns must be allocated before assignemts, since the hir.Schema // separates these out. - errs := r.initialiseDeclarationsInModule(scope.Module(""), circuit.Declarations) + errs := r.initialiseDeclarationsInModule(scope, circuit.Declarations) // for _, m := range circuit.Modules { // Process all declarations in the module - merrs := r.initialiseDeclarationsInModule(scope.Module(m.Name), m.Declarations) + merrs := r.initialiseDeclarationsInModule(scope.Enter(m.Name), m.Declarations) // Package up all errors errs = append(errs, merrs...) } @@ -63,14 +65,14 @@ func (r *resolver) initialiseDeclarations(scope *GlobalScope, circuit *Circuit) // Process all assignment column declarations. These are more complex than for // input columns, since there can be dependencies between them. Thus, we cannot // simply resolve them in one linear scan. 
-func (r *resolver) resolveDeclarations(scope *GlobalScope, circuit *Circuit) []SyntaxError { +func (r *resolver) resolveDeclarations(scope *ModuleScope, circuit *Circuit) []SyntaxError { // Input columns must be allocated before assignemts, since the hir.Schema // separates these out. - errs := r.resolveDeclarationsInModule(scope.Module(""), circuit.Declarations) + errs := r.resolveDeclarationsInModule(scope, circuit.Declarations) // for _, m := range circuit.Modules { // Process all declarations in the module - merrs := r.resolveDeclarationsInModule(scope.Module(m.Name), m.Declarations) + merrs := r.resolveDeclarationsInModule(scope.Enter(m.Name), m.Declarations) // Package up all errors errs = append(errs, merrs...) } @@ -99,13 +101,24 @@ func (r *resolver) resolveDeclarationsInModule(scope *ModuleScope, decls []Decla // contexts, etc). func (r *resolver) initialiseDeclarationsInModule(scope *ModuleScope, decls []Declaration) []SyntaxError { errors := make([]SyntaxError, 0) - // Initialise all columns + // First, initialise any perspectives as submodules of the given scope. Its + // slightly frustrating that we have to do this separately, but the + // non-lexical nature of perspectives forces our hand. + for _, d := range decls { + if def, ok := d.(*DefPerspective); ok { + // Attempt to declare the perspective. Note, we don't need to check + // whether or not this succeeds here as, if it fails, this will be + // caught below. + scope.Declare(def.Name(), true) + } + } + // Second, initialise all symbol (e.g. column) definitions. 
for _, d := range decls { for iter := d.Definitions(); iter.HasNext(); { def := iter.Next() // Attempt to declare symbol - if !scope.Declare(def) { - msg := fmt.Sprintf("symbol %s already declared", def.Name()) + if !scope.Define(def) { + msg := fmt.Sprintf("symbol %s already declared", def.Path()) err := r.srcmap.SyntaxError(def, msg) errors = append(errors, *err) } @@ -246,6 +259,17 @@ func (r *resolver) declarationDependenciesAreFinalised(scope *ModuleScope, errors []SyntaxError finalised bool = true ) + // DefConstraints require special handling because they can be associated + // with a perspective. Perspectives are challenging here because they are + // effectively non-lexical scopes, which is not a good fit for the module + // tree structure used. + if dc, ok := decl.(*DefConstraint); ok && dc.Perspective != nil { + if dc.Perspective.IsResolved() || scope.Bind(dc.Perspective) { + // Temporarily enter the perspective for the purposes of resolving + // symbols within this declaration. + scope = scope.Enter(dc.Perspective.Name()) + } + } // for iter := decl.Dependencies(); iter.HasNext(); { symbol := iter.Next() @@ -304,7 +328,7 @@ func (r *resolver) finaliseDefConstInModule(enclosing Scope, decl *DefConst) []S var errors []SyntaxError // for _, c := range decl.constants { - scope := NewLocalScope(enclosing, false, true, "") + scope := NewLocalScope(enclosing, false, true) // Resolve constant body datatype, errs := r.finaliseExpressionInModule(scope, c.binding.value) // Accumulate errors @@ -324,18 +348,19 @@ func (r *resolver) finaliseDefConstInModule(enclosing Scope, decl *DefConst) []S // Finalise a vanishing constraint declaration after all symbols have been // resolved. This involves: (a) checking the context is valid; (b) checking the // expressions are well-typed. 
-func (r *resolver) finaliseDefConstraintInModule(enclosing Scope, decl *DefConstraint) []SyntaxError { +func (r *resolver) finaliseDefConstraintInModule(enclosing *ModuleScope, decl *DefConstraint) []SyntaxError { var ( guard_errors []SyntaxError guard_t Type - perspective string = "" ) // Identifiery enclosing perspective (if applicable) if decl.Perspective != nil { - perspective = decl.Perspective.Name() + // As before, we must temporarily enter the perspective here. + perspective := decl.Perspective.Name() + enclosing = enclosing.Enter(perspective) } // Construct scope in which to resolve constraint - scope := NewLocalScope(enclosing, false, false, perspective) + scope := NewLocalScope(enclosing, false, false) // Resolve guard if decl.Guard != nil { guard_t, guard_errors = r.finaliseExpressionInModule(scope, decl.Guard) @@ -384,7 +409,7 @@ func (r *resolver) finaliseDefInterleavedInModule(decl *DefInterleaved) []Syntax datatype = binding.dataType } else if binding.multiplier != length_multiplier { // Columns to be interleaved must have the same length multiplier. - err := r.srcmap.SyntaxError(decl, fmt.Sprintf("source column %s has incompatible length multiplier", source.Name())) + err := r.srcmap.SyntaxError(decl, fmt.Sprintf("source column %s has incompatible length multiplier", source.Path())) errors = append(errors, *err) } // Combine datatypes. @@ -436,7 +461,7 @@ func (r *resolver) finaliseDefPermutationInModule(decl *DefPermutation) []Syntax // Resolve those variables appearing in the body of this property assertion. func (r *resolver) finaliseDefPerspectiveInModule(enclosing Scope, decl *DefPerspective) []SyntaxError { - scope := NewLocalScope(enclosing, false, false, "") + scope := NewLocalScope(enclosing, false, false) // Resolve assertion _, errors := r.finaliseExpressionInModule(scope, decl.Selector) // Error check @@ -452,7 +477,7 @@ func (r *resolver) finaliseDefPerspectiveInModule(enclosing Scope, decl *DefPers // expressions are well-typed. 
func (r *resolver) finaliseDefInRangeInModule(enclosing Scope, decl *DefInRange) []SyntaxError { var ( - scope = NewLocalScope(enclosing, false, false, "") + scope = NewLocalScope(enclosing, false, false) ) // Resolve property body _, errors := r.finaliseExpressionInModule(scope, decl.Expr) @@ -467,7 +492,7 @@ func (r *resolver) finaliseDefInRangeInModule(enclosing Scope, decl *DefInRange) // function. func (r *resolver) finaliseDefFunInModule(enclosing Scope, decl *DefFun) []SyntaxError { var ( - scope = NewLocalScope(enclosing, false, decl.IsPure(), "") + scope = NewLocalScope(enclosing, false, decl.IsPure()) ) // Declare parameters in local scope for _, p := range decl.Parameters() { @@ -486,8 +511,8 @@ func (r *resolver) finaliseDefFunInModule(enclosing Scope, decl *DefFun) []Synta // Resolve those variables appearing in the body of this lookup constraint. func (r *resolver) finaliseDefLookupInModule(enclosing Scope, decl *DefLookup) []SyntaxError { var ( - sourceScope = NewLocalScope(enclosing, true, false, "") - targetScope = NewLocalScope(enclosing, true, false, "") + sourceScope = NewLocalScope(enclosing, true, false) + targetScope = NewLocalScope(enclosing, true, false) ) // Resolve source expressions _, source_errors := r.finaliseExpressionsInModule(sourceScope, decl.Sources) @@ -499,7 +524,7 @@ func (r *resolver) finaliseDefLookupInModule(enclosing Scope, decl *DefLookup) [ // Resolve those variables appearing in the body of this property assertion. 
func (r *resolver) finaliseDefPropertyInModule(enclosing Scope, decl *DefProperty) []SyntaxError { - scope := NewLocalScope(enclosing, false, false, "") + scope := NewLocalScope(enclosing, false, false) // Resolve assertion _, errors := r.finaliseExpressionInModule(scope, decl.Assertion) // Done @@ -741,10 +766,8 @@ func (r *resolver) finaliseReduceInModule(scope LocalScope, expr *Reduce) (Type, func (r *resolver) finaliseVariableInModule(scope LocalScope, expr *VariableAccess) (Type, []SyntaxError) { // Check whether this is a qualified access, or not. - if !scope.IsGlobal() && expr.IsQualified() { + if !scope.IsGlobal() && expr.Path().IsAbsolute() { return nil, r.srcmap.SyntaxErrors(expr, "qualified access not permitted here") - } else if expr.IsQualified() && !scope.HasModule(expr.Module()) { - return nil, r.srcmap.SyntaxErrors(expr, fmt.Sprintf("unknown module %s", expr.Module())) } // Symbol should be resolved at this point, but we'd better sanity check this. if !expr.IsResolved() && !scope.Bind(expr) { @@ -759,8 +782,6 @@ func (r *resolver) finaliseVariableInModule(scope LocalScope, return nil, r.srcmap.SyntaxErrors(expr, "conflicting context") } else if scope.IsPure() { return nil, r.srcmap.SyntaxErrors(expr, "not permitted in pure context") - } else if binding.perspective != "" && scope.perspective != binding.perspective { - return nil, r.srcmap.SyntaxErrors(expr, "not visible here") } // Use column's datatype return binding.dataType, nil diff --git a/pkg/corset/scope.go b/pkg/corset/scope.go index 94414d61..d2e372af 100644 --- a/pkg/corset/scope.go +++ b/pkg/corset/scope.go @@ -4,6 +4,7 @@ import ( "fmt" tr "github.com/consensys/go-corset/pkg/trace" + "github.com/consensys/go-corset/pkg/util" ) // Scope represents a region of code in which an expression can be evaluated. @@ -11,142 +12,103 @@ import ( // variable used within an expression refers to. For example, a variable can // refer to a column, or a parameter, etc. 
type Scope interface { - // HasModule checks whether a given module exists, or not. - HasModule(string) bool // Attempt to bind a given symbol within this scope. If successful, the // symbol is then resolved with the appropriate binding. Return value // indicates whether successful or not. Bind(Symbol) bool } -// ============================================================================= -// Global Scope -// ============================================================================= - -// GlobalScope represents the top-level scope in a Corset file, and is used to -// glue the scopes for modules together. For example, it enables one module to -// lookup columns in another. -type GlobalScope struct { - // Top-level mapping of modules to their scopes. - ids map[string]uint - // List of modules in declaration order - modules []ModuleScope -} - -// NewGlobalScope constructs an empty global scope. -func NewGlobalScope() *GlobalScope { - return &GlobalScope{make(map[string]uint), make([]ModuleScope, 0)} -} - -// DeclareModule declares an initialises a new module within this global scope. -// If a module by the same name already exists, then this will panic. -func (p *GlobalScope) DeclareModule(module string) { - // Sanity check module doesn't already exist - if _, ok := p.ids[module]; ok { - panic(fmt.Sprintf("duplicate module %s declared", module)) - } - // Register module - id := uint(len(p.ids)) - p.modules = append(p.modules, ModuleScope{ - module, make(map[BindingId]uint), make([]Binding, 0), p, - }) - p.ids[module] = id - // Declare intrinsics (if applicable) - if module == "" { - for _, i := range INTRINSICS { - p.modules[id].Declare(&i) - } - } -} - -// HasModule checks whether a given module exists, or not. -func (p *GlobalScope) HasModule(module string) bool { - // Attempt to lookup the module - _, ok := p.ids[module] - // Return what we found - return ok -} - -// Bind looks up a given variable being referenced within a given module. 
For a -// root context, this is either a column, an alias or a function declaration. -func (p *GlobalScope) Bind(symbol Symbol) bool { - if !symbol.IsQualified() { - // Search for symbol in root module. - return p.Module("").Bind(symbol) - } else if !p.HasModule(symbol.Module()) { - // Pontially, it might be better to report a more useful error message. - return false - } - // - return p.Module(symbol.Module()).Bind(symbol) -} - -// Module returns the identifier of the module with the given name. Observe -// that this will panic if the module in question does not exist. -func (p *GlobalScope) Module(name string) *ModuleScope { - if mid, ok := p.ids[name]; ok { - return &p.modules[mid] - } - // Problem. - panic(fmt.Sprintf("unknown module \"%s\"", name)) -} - -// ToEnvironment converts this global scope into a concrete environment by -// allocating all columns within this scope. -func (p *GlobalScope) ToEnvironment(allocator func(RegisterAllocation)) Environment { - return NewGlobalEnvironment(p, allocator) +// BindingId is an identifier is used to distinguish different forms of binding, +// as some forms are known from their use. Specifically, at the current time, +// only functions are distinguished from other categories (e.g. columns, +// parameters, etc). +type BindingId struct { + // Name of the binding + name string + // Indicates whether function binding or other. + fn bool } // ============================================================================= // Module Scope // ============================================================================= -// ModuleScope represents the scope characterised by a module. +// ModuleScope defines recursive tree of scopes where symbols can be resolved +// and bound. The primary goal is to handle the various ways in which a +// symbol's qualified name (i.e. path) can be expressed. 
For example, a symbol +// can be given an absolute name (which is resolved from the root of the scope +// tree), or it can be relative (in which case it is resolved relative to a +// given module). type ModuleScope struct { - // Module name - module string - // Mapping from binding identifiers to indices within the bindings array. + // Absolute path + path util.Path + // Map identifiers to indices within the bindings array. ids map[BindingId]uint // The set of bindings in the order of declaration. bindings []Binding - // Enclosing global scope - enclosing Scope + // Enclosing scope + parent *ModuleScope + // Submodules in a map (for efficient lookup) + submodmap map[string]*ModuleScope + // Submodules in the order of declaration (for determinism). + submodules []*ModuleScope + // Indicates whether or not this is a real module. + virtual bool } -// EnclosingModule returns the name of the enclosing module. This is generally -// useful for reporting errors. -func (p *ModuleScope) EnclosingModule() string { - return p.module +// NewModuleScope constructs an initially empty top-level scope. +func NewModuleScope() *ModuleScope { + return &ModuleScope{ + util.NewAbsolutePath(), + make(map[BindingId]uint), + nil, + nil, + make(map[string]*ModuleScope), + nil, + false, + } } -// HasModule checks whether a given module exists, or not. -func (p *ModuleScope) HasModule(module string) bool { - return p.enclosing.HasModule(module) +// IsRoot checks whether or not this is the root of the module tree. +func (p *ModuleScope) IsRoot() bool { + return p.parent == nil } -// Bind looks up a given variable being referenced within a given module. For a -// root context, this is either a column, an alias or a function declaration. -func (p *ModuleScope) Bind(symbol Symbol) bool { - // Determine module for this lookup. 
- if symbol.IsQualified() && symbol.Module() != p.module { - // non-local lookup - return p.enclosing.Bind(symbol) +// Owner returns the enclosing non-virtual module of this module. Observe +// that, if this is a non-virtual module, then it is returned. +func (p *ModuleScope) Owner() *ModuleScope { + if !p.virtual { + return p + } else if p.parent != nil { + return p.parent.Owner() } - // construct binding identifier - id := BindingId{symbol.Name(), symbol.IsFunction()} - // Look for it. - if bid, ok := p.ids[id]; ok { - // Extract binding - binding := p.bindings[bid] - // Resolve symbol - return symbol.Resolve(binding) - } else if !symbol.IsQualified() && p.module != "" { - // Attempt to lookup in parent (unless we are the root module, in which - // case we have no parent) - return p.enclosing.Bind(symbol) + // Should be unreachable + panic("invalid module tree") +} + +// Declare a new submodule at the given (absolute) path within this tree scope. +// Submodules can be declared as "virtual" which indicates the submodule is +// simply a subset of rows of its enclosing module. This returns true if this +// succeeds, otherwise returns false (i.e. a matching submodule already exists). +func (p *ModuleScope) Declare(submodule string, virtual bool) bool { + if _, ok := p.submodmap[submodule]; ok { + return false } - // - return false + // Construct suitable child scope + scope := &ModuleScope{ + *p.path.Extend(submodule), + make(map[BindingId]uint), + nil, + p, + make(map[string]*ModuleScope), + nil, + virtual, + } + // Update records + p.submodmap[submodule] = scope + p.submodules = append(p.submodules, scope) + // Done + return true } // Binding returns information about the binding of a particular symbol defined @@ -160,17 +122,109 @@ func (p *ModuleScope) Binding(name string, function bool) Binding { return nil } -// Column returns information about a particular column declared within this -// module. 
-func (p *ModuleScope) Column(name string) *ColumnBinding {
-	// construct binding identifier
-	bid := p.ids[BindingId{name, false}]
+// Bind looks up a given variable being referenced within a given module. For a
+// root context, this is either a column, an alias or a function declaration.
+func (p *ModuleScope) Bind(symbol Symbol) bool {
+	// Split the two cases: absolute versus relative.
+	if symbol.Path().IsAbsolute() && p.parent != nil {
+		// Absolute path, and this is not the root scope. Therefore, simply
+		// pass this up to the root scope for further processing.
+		return p.parent.Bind(symbol)
+	}
+	// Relative path from this scope, or possibly an absolute path if this is
+	// the root scope.
+	found := p.innerBind(symbol.Path(), symbol)
+	// If not found, traverse upwards.
+	if !found && p.parent != nil {
+		return p.parent.Bind(symbol)
+	}
 	//
-	return p.bindings[bid].(*ColumnBinding)
+	return found
 }
 
-// Declare declares a given binding within this module scope.
-func (p *ModuleScope) Declare(symbol SymbolDefinition) bool {
+// innerBind is really a helper which allows us to split out the symbol path
+// from the symbol itself. This then lets us "traverse" the path as we go
+// looking through submodules, etc.
+func (p *ModuleScope) innerBind(path *util.Path, symbol Symbol) bool {
+	// Relative path. Then, either it refers to something in this scope, or
+	// something in a subscope.
+	if path.Depth() == 1 {
+		// Must be something in this scope.
+		id := BindingId{symbol.Path().Tail(), symbol.IsFunction()}
+		// Look for it.
+		if bid, ok := p.ids[id]; ok {
+			// Extract binding
+			binding := p.bindings[bid]
+			// Resolve symbol
+			return symbol.Resolve(binding)
+		}
+	} else if submod, ok := p.submodmap[path.Head()]; ok {
+		// Looks like this could be in the child scope, so continue searching there.
+		return submod.innerBind(path.Dehead(), symbol)
+	}
+	// Otherwise, not found in this subtree (caller may traverse upwards).
+	return false
+}
+
+// Enter returns a given submodule within this module.
+func (p *ModuleScope) Enter(submodule string) *ModuleScope {
+	if child, ok := p.submodmap[submodule]; ok {
+		// Looks like this is in the child scope, so continue searching there.
+		return child
+	}
+	// Should be unreachable.
+	panic("unknown submodule")
+}
+
+// Alias constructs an alias for an existing symbol. If the symbol does not
+// exist, then this returns false.
+func (p *ModuleScope) Alias(alias string, symbol Symbol) bool {
+	// Sanity checks. These are required for now, since we cannot alias
+	// bindings in another scope at this time.
+	if symbol.Path().IsAbsolute() || symbol.Path().Depth() != 1 {
+		// This should be unreachable at the moment.
+		panic(fmt.Sprintf("qualified aliases not supported %s", symbol.Path().String()))
+	}
+	// Extract symbol name
+	name := symbol.Path().Head()
+	// construct symbol identifier
+	symbol_id := BindingId{name, symbol.IsFunction()}
+	// construct alias identifier
+	alias_id := BindingId{alias, symbol.IsFunction()}
+	// Check alias does not already exist
+	if _, ok := p.ids[alias_id]; !ok {
+		// Check symbol being aliased exists
+		if id, ok := p.ids[symbol_id]; ok {
+			p.ids[alias_id] = id
+			// Done
+			return true
+		}
+	}
+	// Symbol not known (yet)
+	return false
+}
+
+// Define a new symbol within this scope.
+func (p *ModuleScope) Define(symbol SymbolDefinition) bool {
+	// Sanity checks
+	if !symbol.Path().IsAbsolute() {
+		// Definitely should be unreachable.
+		panic("symbol definition cannot have relative path!")
+	} else if !p.path.PrefixOf(*symbol.Path()) {
+		// Should be unreachable.
+		err := fmt.Sprintf("invalid symbol definition (%s not prefix of %s)", p.path.String(), symbol.Path().String())
+		panic(err)
+	} else if !symbol.Path().Parent().Equals(p.path) {
+		name := symbol.Path().Get(p.path.Depth())
+		// Looks like this definition is for a submodule. Therefore, attempt to
+		// find it and then define it there. 
+		if mod, ok := p.submodmap[name]; ok {
+			// Found it, so attempt definition.
+			return mod.Define(symbol)
+		}
+		// Failed
+		return false
+	}
 	// construct binding identifier
 	id := BindingId{symbol.Name(), symbol.IsFunction()}
 	// Sanity check not already declared
@@ -199,24 +253,16 @@
 	return true
 }
 
-// Alias constructs an alias for an existing symbol. If the symbol does not
-// exist, then this returns false.
-func (p *ModuleScope) Alias(alias string, symbol Symbol) bool {
-	// construct symbol identifier
-	symbol_id := BindingId{symbol.Name(), symbol.IsFunction()}
-	// construct alias identifier
-	alias_id := BindingId{alias, symbol.IsFunction()}
-	// Check alias does not already exist
-	if _, ok := p.ids[alias_id]; !ok {
-		// Check symbol being aliased exists
-		if id, ok := p.ids[symbol_id]; ok {
-			p.ids[alias_id] = id
-			// Done
-			return true
-		}
+// Flattern flattens the tree into a flat array of modules, such that a module
+// always comes before its own submodules.
+func (p *ModuleScope) Flattern() []*ModuleScope {
+	modules := []*ModuleScope{p}
+	//
+	for _, m := range p.submodules {
+		modules = append(modules, m.Flattern()...)
 	}
-	// Symbol not known (yet)
-	return false
+	//
+	return modules
 }
 
 // =============================================================================
@@ -236,8 +282,6 @@ type LocalScope struct {
 	enclosing Scope
 	// Context for this scope
 	context *Context
-	// Perspective for this scope
-	perspective string
 	// Maps inputs parameters to the declaration index.
 	locals map[string]uint
 	// Actual parameter bindings
@@ -248,12 +292,12 @@ type LocalScope struct {
 // local scope can have local variables declared within it. A local scope can
 // also be "global" in the sense that accessing symbols from other modules is
 // permitted. 
-func NewLocalScope(enclosing Scope, global bool, pure bool, perspective string) LocalScope { +func NewLocalScope(enclosing Scope, global bool, pure bool) LocalScope { context := tr.VoidContext[string]() locals := make(map[string]uint) bindings := make([]*LocalVariableBinding, 0) // - return LocalScope{global, pure, enclosing, &context, perspective, locals, bindings} + return LocalScope{global, pure, enclosing, &context, locals, bindings} } // NestedScope creates a nested scope within this local scope. @@ -267,7 +311,7 @@ func (p LocalScope) NestedScope() LocalScope { // Copy over bindings. copy(nbindings, p.bindings) // Done - return LocalScope{p.global, p.pure, p, p.context, p.perspective, nlocals, nbindings} + return LocalScope{p.global, p.pure, p, p.context, nlocals, nbindings} } // NestedPureScope creates a nested scope within this local scope which, in @@ -282,7 +326,7 @@ func (p LocalScope) NestedPureScope() LocalScope { // Copy over bindings. copy(nbindings, p.bindings) // Done - return LocalScope{p.global, true, p, p.context, p.perspective, nlocals, nbindings} + return LocalScope{p.global, true, p, p.context, nlocals, nbindings} } // IsGlobal determines whether symbols can be accessed in modules other than the @@ -307,16 +351,14 @@ func (p LocalScope) FixContext(context Context) bool { return !p.context.IsConflicted() } -// HasModule checks whether a given module exists, or not. -func (p LocalScope) HasModule(module string) bool { - return p.enclosing.HasModule(module) -} - // Bind looks up a given variable or function being referenced either within the // enclosing scope (module==nil) or within a specified module. func (p LocalScope) Bind(symbol Symbol) bool { + path := symbol.Path() + // Determine whether this symbol could be a local variable or not. + localVar := !symbol.IsFunction() && !path.IsAbsolute() && path.Depth() == 1 // Check whether this is a local variable access. 
- if id, ok := p.locals[symbol.Name()]; ok && !symbol.IsFunction() && !symbol.IsQualified() { + if id, ok := p.locals[path.Head()]; ok && localVar { // Yes, this is a local variable access. return symbol.Resolve(p.bindings[id]) } diff --git a/pkg/corset/symbol.go b/pkg/corset/symbol.go index 76e995fe..1065778a 100644 --- a/pkg/corset/symbol.go +++ b/pkg/corset/symbol.go @@ -1,9 +1,8 @@ package corset import ( - "fmt" - "github.com/consensys/go-corset/pkg/sexp" + "github.com/consensys/go-corset/pkg/util" ) // Symbol represents a variable or function access within a declaration. @@ -12,17 +11,12 @@ import ( // a constant access, etc). type Symbol interface { Node - // Determines whether this symbol is qualfied or not (i.e. has an explicitly - // module specifier). - IsQualified() bool + // Path returns the given path of this symbol. + Path() *util.Path // Indicates whether or not this is a function. IsFunction() bool // Checks whether this symbol has been resolved already, or not. IsResolved() bool - // Optional module qualification - Module() string - // Name of the symbol - Name() string // Get binding associated with this interface. This will panic if this // symbol is not yet resolved. Binding() Binding @@ -34,22 +28,17 @@ type Symbol interface { Resolve(Binding) bool } -// QualifiedName returns the qualified name of a given symbol -func QualifiedName(symbol Symbol) string { - if symbol.IsQualified() { - return fmt.Sprintf("%s.%s", symbol.Module(), symbol.Name()) - } - // - return symbol.Name() -} - // SymbolDefinition represents a declaration (or part thereof) which defines a // particular symbol. For example, "defcolumns" will define one or more symbols // representing columns, etc. type SymbolDefinition interface { Node - // Name of symbol being defined + // Name returns the (unqualified) name of this symbol. For example, "X" for + // a column X defined in a module m1. Name() string + // Path returns the qualified name (i.e. absolute path) of this symbol. 
For + // example, "m1.X" for a column X defined in module m1. + Path() *util.Path // Indicates whether or not this is a function definition. IsFunction() bool // Allocated binding for the symbol which may or may not be finalised. @@ -61,8 +50,8 @@ type SymbolDefinition interface { type ColumnName = Name[*ColumnBinding] // NewColumnName construct a new column name which is (initially) unresolved. -func NewColumnName(name string) *ColumnName { - return &ColumnName{name, false, nil, false} +func NewColumnName(path util.Path) *ColumnName { + return &ColumnName{path, false, nil, false} } // FunctionName represents a name used in a position where it can only be @@ -70,8 +59,8 @@ func NewColumnName(name string) *ColumnName { type FunctionName = Name[*DefunBinding] // NewFunctionName construct a new column name which is (initially) unresolved. -func NewFunctionName(name string, binding *DefunBinding) *FunctionName { - return &FunctionName{name, true, binding, true} +func NewFunctionName(path util.Path, binding *DefunBinding) *FunctionName { + return &FunctionName{path, true, binding, true} } // PerspectiveName represents a name used in a position where it can only be @@ -79,8 +68,8 @@ func NewFunctionName(name string, binding *DefunBinding) *FunctionName { type PerspectiveName = Name[*PerspectiveBinding] // NewPerspectiveName construct a new column name which is (initially) unresolved. -func NewPerspectiveName(name string, binding *PerspectiveBinding) *PerspectiveName { - return &PerspectiveName{name, true, binding, true} +func NewPerspectiveName(path util.Path, binding *PerspectiveBinding) *PerspectiveName { + return &PerspectiveName{path, true, binding, true} } // Name represents a name within some syntactic item. Essentially this wraps a @@ -88,7 +77,7 @@ func NewPerspectiveName(name string, binding *PerspectiveBinding) *PerspectiveNa // information. 
type Name[T Binding] struct { // Name of symbol - name string + path util.Path // Indicates whether represents function or something else. function bool // Binding constructed for symbol. @@ -98,17 +87,23 @@ type Name[T Binding] struct { } // NewName construct a new name which is (initially) unresolved. -func NewName[T Binding](name string, function bool) *Name[T] { +func NewName[T Binding](path util.Path, function bool) *Name[T] { // Default value for type T var empty T // Construct the name - return &Name[T]{name, function, empty, false} + return &Name[T]{path, function, empty, false} +} + +// Name returns the (unqualified) name of this symbol. For example, "X" for +// a column X defined in a module m1. +func (e *Name[T]) Name() string { + return e.path.Tail() } -// IsQualified determines whether this symbol is qualfied or not (i.e. has an -// explicit module specifier). Column names are never qualified. -func (e *Name[T]) IsQualified() bool { - return false +// Path returns the qualified name (i.e. absolute path) of this symbol. For +// example, "m1.X" for a column X defined in module m1. +func (e *Name[T]) Path() *util.Path { + return &e.path } // IsFunction indicates whether or not this symbol refers to a function (which @@ -122,18 +117,6 @@ func (e *Name[T]) IsResolved() bool { return e.resolved } -// Module returns the optional module qualification. This always panics because -// column name's are never qualified. -func (e *Name[T]) Module() string { - panic("undefined") -} - -// Name returns the (unqualified) name of the column to which this symbol -// refers. -func (e *Name[T]) Name() string { - return e.name -} - // Binding gets binding associated with this interface. This will panic if this // symbol is not yet resolved. func (e *Name[T]) Binding() Binding { @@ -162,5 +145,5 @@ func (e *Name[T]) Resolve(binding Binding) bool { // Lisp converts this node into its lisp representation. This is primarily used // for debugging purposes. 
func (e *Name[T]) Lisp() sexp.SExp { - return sexp.NewSymbol(e.name) + return sexp.NewSymbol(e.path.String()) } diff --git a/pkg/corset/translator.go b/pkg/corset/translator.go index ab46efa6..982b0d51 100644 --- a/pkg/corset/translator.go +++ b/pkg/corset/translator.go @@ -10,6 +10,7 @@ import ( "github.com/consensys/go-corset/pkg/schema/assignment" "github.com/consensys/go-corset/pkg/sexp" tr "github.com/consensys/go-corset/pkg/trace" + "github.com/consensys/go-corset/pkg/util" ) // TranslateCircuit translates the components of a Corset circuit and add them @@ -163,10 +164,12 @@ func (t *translator) translateTypeConstraints(regIndex uint) { // Translate all assignment or constraint declarations in the circuit. func (t *translator) translateOtherDeclarations(circuit *Circuit) []SyntaxError { - errors := t.translateOtherDeclarationsInModule("", circuit.Declarations) + rootPath := util.NewAbsolutePath() + errors := t.translateOtherDeclarationsInModule(rootPath, circuit.Declarations) // Translate each module for _, m := range circuit.Modules { - errs := t.translateOtherDeclarationsInModule(m.Name, m.Declarations) + modPath := rootPath.Extend(m.Name) + errs := t.translateOtherDeclarationsInModule(*modPath, m.Declarations) errors = append(errors, errs...) } // Done @@ -175,7 +178,7 @@ func (t *translator) translateOtherDeclarations(circuit *Circuit) []SyntaxError // Translate all assignment or constraint declarations in a given module within // the circuit. -func (t *translator) translateOtherDeclarationsInModule(module string, decls []Declaration) []SyntaxError { +func (t *translator) translateOtherDeclarationsInModule(module util.Path, decls []Declaration) []SyntaxError { var errors []SyntaxError // for _, d := range decls { @@ -188,7 +191,7 @@ func (t *translator) translateOtherDeclarationsInModule(module string, decls []D // Translate an assignment or constraint declarartion which occurs within a // given module. 
-func (t *translator) translateDeclaration(decl Declaration, module string) []SyntaxError { +func (t *translator) translateDeclaration(decl Declaration, module util.Path) []SyntaxError { var errors []SyntaxError // switch d := decl.(type) { @@ -224,7 +227,7 @@ func (t *translator) translateDeclaration(decl Declaration, module string) []Syn } // Translate a "defconstraint" declaration. -func (t *translator) translateDefConstraint(decl *DefConstraint, module string) []SyntaxError { +func (t *translator) translateDefConstraint(decl *DefConstraint, module util.Path) []SyntaxError { // Translate constraint body constraint, errors := t.translateExpressionInModule(decl.Constraint, module, 0) // Translate (optional) guard @@ -270,7 +273,9 @@ func (t *translator) translateDefConstraint(decl *DefConstraint, module string) // Translate the selector for the perspective of a defconstraint. Observe that // a defconstraint may not be part of a perspective and, hence, would have no // selector. -func (t *translator) translateSelectorInModule(perspective *PerspectiveName, module string) (hir.Expr, []SyntaxError) { +func (t *translator) translateSelectorInModule(perspective *PerspectiveName, + module util.Path) (hir.Expr, []SyntaxError) { + // if perspective != nil { return t.translateExpressionInModule(perspective.binding.selector, module, 0) } @@ -281,7 +286,7 @@ func (t *translator) translateSelectorInModule(perspective *PerspectiveName, mod // Translate a "deflookup" declaration. 
// //nolint:staticcheck -func (t *translator) translateDefLookup(decl *DefLookup, module string) []SyntaxError { +func (t *translator) translateDefLookup(decl *DefLookup, module util.Path) []SyntaxError { // Translate source expressions sources, src_errs := t.translateUnitExpressionsInModule(decl.Sources, module, 0) targets, tgt_errs := t.translateUnitExpressionsInModule(decl.Targets, module, 0) @@ -299,7 +304,7 @@ func (t *translator) translateDefLookup(decl *DefLookup, module string) []Syntax } // Translate a "definrange" declaration. -func (t *translator) translateDefInRange(decl *DefInRange, module string) []SyntaxError { +func (t *translator) translateDefInRange(decl *DefInRange, module util.Path) []SyntaxError { // Translate constraint body expr, errors := t.translateExpressionInModule(decl.Expr, module, 0) // @@ -313,16 +318,17 @@ func (t *translator) translateDefInRange(decl *DefInRange, module string) []Synt } // Translate a "definterleaved" declaration. -func (t *translator) translateDefInterleaved(decl *DefInterleaved, module string) []SyntaxError { +func (t *translator) translateDefInterleaved(decl *DefInterleaved, module util.Path) []SyntaxError { var errors []SyntaxError // sources := make([]uint, len(decl.Sources)) // Lookup target column info - targetId := t.env.RegisterOf(module, decl.Target.Name()) + targetPath := module.Extend(decl.Target.Name()) + targetId := t.env.RegisterOf(targetPath) target := t.env.Register(targetId) // Determine source column identifiers for i, source := range decl.Sources { - sources[i] = t.env.RegisterOf(module, source.Name()) + sources[i] = t.env.RegisterOf(source.Path()) } // Register assignment cid := t.schema.AddAssignment(assignment.NewInterleaving(target.Context, target.Name, sources, target.DataType)) @@ -336,7 +342,7 @@ func (t *translator) translateDefInterleaved(decl *DefInterleaved, module string } // Translate a "defpermutation" declaration. 
-func (t *translator) translateDefPermutation(decl *DefPermutation, module string) []SyntaxError { +func (t *translator) translateDefPermutation(decl *DefPermutation, module util.Path) []SyntaxError { var ( errors []SyntaxError context tr.Context = tr.VoidContext[uint]() @@ -348,11 +354,12 @@ func (t *translator) translateDefPermutation(decl *DefPermutation, module string sources := make([]uint, len(decl.Sources)) // for i := 0; i < len(decl.Sources); i++ { - targetId := t.env.RegisterOf(module, decl.Targets[i].Name()) + targetPath := module.Extend(decl.Targets[i].Name()) + targetId := t.env.RegisterOf(targetPath) target := t.env.Register(targetId) // Construct columns targets[i] = sc.NewColumn(target.Context, target.Name, target.DataType) - sources[i] = t.env.RegisterOf(module, decl.Sources[i].Name()) + sources[i] = t.env.RegisterOf(decl.Sources[i].Path()) signs[i] = decl.Signs[i] // Record first CID if i == 0 { @@ -365,7 +372,7 @@ func (t *translator) translateDefPermutation(decl *DefPermutation, module string cid := t.schema.AddAssignment(assignment.NewSortedPermutation(context, targets, signs, sources)) // Sanity check column identifiers align. if cid != firstCid { - err := fmt.Sprintf("inconsitent (permuted) column identifier (%d v %d)", cid, firstCid) + err := fmt.Sprintf("inconsistent (permuted) column identifier (%d v %d)", cid, firstCid) errors = append(errors, *t.srcmap.SyntaxError(decl, err)) } // Done @@ -373,7 +380,7 @@ func (t *translator) translateDefPermutation(decl *DefPermutation, module string } // Translate a "defproperty" declaration. 
-func (t *translator) translateDefProperty(decl *DefProperty, module string) []SyntaxError { +func (t *translator) translateDefProperty(decl *DefProperty, module util.Path) []SyntaxError { // Translate constraint body assertion, errors := t.translateExpressionInModule(decl.Assertion, module, 0) // @@ -389,7 +396,7 @@ func (t *translator) translateDefProperty(decl *DefProperty, module string) []Sy // Translate an optional expression in a given context. That is an expression // which maybe nil (i.e. doesn't exist). In such case, nil is returned (i.e. // without any errors). -func (t *translator) translateOptionalExpressionInModule(expr Expr, module string, +func (t *translator) translateOptionalExpressionInModule(expr Expr, module util.Path, shift int) (hir.Expr, []SyntaxError) { // if expr != nil { @@ -402,7 +409,7 @@ func (t *translator) translateOptionalExpressionInModule(expr Expr, module strin // Translate an optional expression in a given context. That is an expression // which maybe nil (i.e. doesn't exist). In such case, nil is returned (i.e. // without any errors). -func (t *translator) translateUnitExpressionsInModule(exprs []Expr, module string, +func (t *translator) translateUnitExpressionsInModule(exprs []Expr, module util.Path, shift int) ([]hir.UnitExpr, []SyntaxError) { // errors := []SyntaxError{} @@ -421,7 +428,7 @@ func (t *translator) translateUnitExpressionsInModule(exprs []Expr, module strin } // Translate a sequence of zero or more expressions enclosed in a given module. -func (t *translator) translateExpressionsInModule(exprs []Expr, module string, +func (t *translator) translateExpressionsInModule(exprs []Expr, module util.Path, shift int) ([]hir.Expr, []SyntaxError) { // errors := []SyntaxError{} @@ -445,7 +452,7 @@ func (t *translator) translateExpressionsInModule(exprs []Expr, module string, // Translate an expression situated in a given context. The context is // necessary to resolve unqualified names (e.g. 
for column access, function // invocations, etc). -func (t *translator) translateExpressionInModule(expr Expr, module string, shift int) (hir.Expr, []SyntaxError) { +func (t *translator) translateExpressionInModule(expr Expr, module util.Path, shift int) (hir.Expr, []SyntaxError) { switch e := expr.(type) { case *ArrayAccess: return t.translateArrayAccessInModule(e, shift) @@ -520,14 +527,16 @@ func (t *translator) translateArrayAccessInModule(expr *ArrayAccess, shift int) return nil, errors } // Construct real column name - name := fmt.Sprintf("%s_%d", expr.Name(), index.Uint64()) + path := &binding.path + name := fmt.Sprintf("%s_%d", path.Tail(), index.Uint64()) + path = path.Parent().Extend(name) // Lookup underlying column info - registerId := t.env.RegisterOf(binding.module, name) + registerId := t.env.RegisterOf(path) // Done return &hir.ColumnAccess{Column: registerId, Shift: shift}, nil } -func (t *translator) translateExpInModule(expr *Exp, module string, shift int) (hir.Expr, []SyntaxError) { +func (t *translator) translateExpInModule(expr *Exp, module util.Path, shift int) (hir.Expr, []SyntaxError) { arg, errs := t.translateExpressionInModule(expr.Arg, module, shift) pow := expr.Pow.AsConstant() // Identity constant for pow @@ -544,7 +553,7 @@ func (t *translator) translateExpInModule(expr *Exp, module string, shift int) ( return nil, errs } -func (t *translator) translateShiftInModule(expr *Shift, module string, shift int) (hir.Expr, []SyntaxError) { +func (t *translator) translateShiftInModule(expr *Shift, module util.Path, shift int) (hir.Expr, []SyntaxError) { constant := expr.Shift.AsConstant() // Determine the shift constant if constant == nil { @@ -559,7 +568,7 @@ func (t *translator) translateShiftInModule(expr *Shift, module string, shift in func (t *translator) translateVariableAccessInModule(expr *VariableAccess, shift int) (hir.Expr, []SyntaxError) { if binding, ok := expr.Binding().(*ColumnBinding); ok { // Lookup column binding - 
register_id := t.env.RegisterOf(binding.module, expr.Name()) + register_id := t.env.RegisterOf(binding.AbsolutePath()) // Done return &hir.ColumnAccess{Column: register_id, Shift: shift}, nil } else if binding, ok := expr.Binding().(*ConstantBinding); ok { diff --git a/pkg/test/invalid_corset_test.go b/pkg/test/invalid_corset_test.go index 88090c94..3b096e77 100644 --- a/pkg/test/invalid_corset_test.go +++ b/pkg/test/invalid_corset_test.go @@ -631,10 +631,6 @@ func Test_Invalid_Perspective_03(t *testing.T) { CheckInvalid(t, "perspective_invalid_03") } -func Test_Invalid_Perspective_04(t *testing.T) { - CheckInvalid(t, "perspective_invalid_04") -} - func Test_Invalid_Perspective_05(t *testing.T) { CheckInvalid(t, "perspective_invalid_05") } @@ -643,8 +639,8 @@ func Test_Invalid_Perspective_06(t *testing.T) { CheckInvalid(t, "perspective_invalid_06") } -func Test_Invalid_Perspective_07(t *testing.T) { - CheckInvalid(t, "perspective_invalid_07") +func Test_Invalid_Perspective_08(t *testing.T) { + CheckInvalid(t, "perspective_invalid_08") } // =================================================================== diff --git a/pkg/test/valid_corset_test.go b/pkg/test/valid_corset_test.go index 4d571b57..f7d0eee8 100644 --- a/pkg/test/valid_corset_test.go +++ b/pkg/test/valid_corset_test.go @@ -107,6 +107,10 @@ func Test_Constant_10(t *testing.T) { Check(t, false, "constant_10") } +func Test_Constant_11(t *testing.T) { + Check(t, false, "constant_11") +} + // =================================================================== // Alias Tests // =================================================================== @@ -841,6 +845,32 @@ func Test_Perspective_20(t *testing.T) { Check(t, false, "perspective_20") } +func Test_Perspective_21(t *testing.T) { + Check(t, false, "perspective_21") +} + +func Test_Perspective_22(t *testing.T) { + Check(t, false, "perspective_22") +} + +func Test_Perspective_23(t *testing.T) { + Check(t, false, "perspective_23") +} + +func 
Test_Perspective_24(t *testing.T) {
+	Check(t, false, "perspective_24")
+}
+
+// NOTE: this test could not currently pass because it results in a name clash
+// between a symbol in the enclosing module, and one defined in a perspective.
+// This is because of the weak naming scheme currently used for perspective
+// columns to maintain backwards compatibility. When this restriction is
+// lifted, this test can pass.
+//
+// func Test_Perspective_25(t *testing.T) {
+// 	Check(t, false, "perspective_25")
+// }
+
 // ===================================================================
 // Complex Tests
 // ===================================================================
diff --git a/pkg/util/arrays.go b/pkg/util/arrays.go
index bf671d13..5a4335b3 100644
--- a/pkg/util/arrays.go
+++ b/pkg/util/arrays.go
@@ -6,6 +6,38 @@ import (
 	"github.com/consensys/gnark-crypto/ecc/bls12-377/fr"
 )
 
+// Prepend creates a new slice containing the result of prepending the given
+// item onto the front of the given slice. Observe that, unlike the built-in
+// append() function, this will never modify the given slice.
+func Prepend[T any](item T, slice []T) []T {
+	n := len(slice)
+	// Make space for new slice
+	nslice := make([]T, n+1)
+	// Copy existing values
+	copy(nslice[1:], slice)
+	// Set first value
+	nslice[0] = item
+	// Done
+	return nslice
+}
+
+// Append creates a new slice containing the result of appending the given item
+// onto the end of the given slice. Observe that, unlike the built-in append()
+// function, this will never modify the given slice.
+//
+//nolint:revive
+func Append[T any](slice []T, item T) []T {
+	n := len(slice)
+	// Make space for new slice
+	nslice := make([]T, n+1)
+	// Copy existing values
+	copy(nslice[:n], slice)
+	// Set last value
+	nslice[n] = item
+	// Done
+	return nslice
+}
+
 // ReplaceFirstOrPanic replaces the first occurrence of a given item (from) in an
 // array with another item (to). If not match is found, then this will panic.
// In otherwords, we are expecting a match.
diff --git a/pkg/util/path.go b/pkg/util/path.go
new file mode 100644
index 00000000..46dbfad4
--- /dev/null
+++ b/pkg/util/path.go
@@ -0,0 +1,132 @@
+package util
+
+import (
+	"fmt"
+	"slices"
+)
+
+// Path is a construct for describing paths through trees. A path can be either
+// *absolute* or *relative*. An absolute path always starts from the root of
+// the tree, whilst a relative path can begin from any point within the tree.
+type Path struct {
+	// Indicates whether or not this is an absolute path.
+	absolute bool
+	// Segments in the path.
+	segments []string
+}
+
+// NewAbsolutePath constructs a new absolute path from the given segments.
+func NewAbsolutePath(segments ...string) Path {
+	return Path{true, segments}
+}
+
+// NewRelativePath constructs a new relative path from the given segments.
+func NewRelativePath(segments ...string) Path {
+	return Path{false, segments}
+}
+
+// Depth returns the number of segments in this path (a.k.a. its depth).
+func (p *Path) Depth() uint {
+	return uint(len(p.segments))
+}
+
+// IsAbsolute determines whether or not this is an absolute path.
+func (p *Path) IsAbsolute() bool {
+	return p.absolute
+}
+
+// Head returns the first (i.e. outermost) segment in this path.
+func (p *Path) Head() string {
+	return p.segments[0]
+}
+
+// Dehead removes the head from this path, returning an otherwise identical
+// path. Observe that, if this were absolute, it is no longer!
+func (p *Path) Dehead() *Path {
+	return &Path{false, p.segments[1:]}
+}
+
+// Tail returns the last (i.e. innermost) segment in this path.
+func (p *Path) Tail() string {
+	n := len(p.segments) - 1
+	return p.segments[n]
+}
+
+// Get returns the nth segment of this path.
+func (p *Path) Get(nth uint) string {
+	return p.segments[nth]
+}
+
+// Equals determines whether two paths are the same.
+func (p *Path) Equals(other Path) bool {
+	return p.absolute == other.absolute && slices.Equal(p.segments, other.segments)
+}
+
+// PrefixOf checks whether this path is a prefix of the other.
+func (p *Path) PrefixOf(other Path) bool {
+	if len(p.segments) > len(other.segments) {
+		return false
+	}
+	//
+	for i := range p.segments {
+		if p.segments[i] != other.segments[i] {
+			return false
+		}
+	}
+	// Looks good
+	return true
+}
+
+// Slice returns the subpath starting from the given segment.
+func (p *Path) Slice(start uint) *Path {
+	return &Path{false, p.segments[start:]}
+}
+
+// PushRoot converts a relative path into an absolute path by pushing the "root"
+// of the tree onto the head (i.e. outermost) position.
+func (p *Path) PushRoot(tail string) *Path {
+	if p.absolute {
+		panic("cannot push root onto absolute path")
+	}
+	// Prepend root to segments
+	nsegments := Prepend(tail, p.segments)
+	// Convert to absolute path
+	return &Path{true, nsegments}
+}
+
+// Parent returns the parent of this path.
+func (p *Path) Parent() *Path {
+	n := p.Depth() - 1
+	return &Path{p.absolute, p.segments[0:n]}
+}
+
+// Extend returns this path extended with a new innermost segment.
+func (p *Path) Extend(tail string) *Path {
+	return &Path{p.absolute, Append(p.segments, tail)}
+}
+
+// String returns a string representation of this path.
+func (p *Path) String() string { + if p.IsAbsolute() { + switch len(p.segments) { + case 0: + return "" + case 1: + return p.segments[0] + case 2: + return fmt.Sprintf("%s.%s", p.segments[0], p.segments[1]) + default: + return fmt.Sprintf("%s/%s", p.Parent().String(), p.Tail()) + } + } + // + switch len(p.segments) { + case 0: + // Non-sensical case really + return "/" + case 1: + return fmt.Sprintf("/%s", p.segments[0]) + default: + return fmt.Sprintf("%s/%s", p.Parent().String(), p.Tail()) + } +} diff --git a/testdata/constant_11.accepts b/testdata/constant_11.accepts new file mode 100644 index 00000000..9750a89e --- /dev/null +++ b/testdata/constant_11.accepts @@ -0,0 +1,11 @@ +{"m1.X": [], "m1.Y": []} +{"m1.X": [-4], "m1.Y": [2]} +{"m1.X": [-2], "m1.Y": [1]} +{"m1.X": [0], "m1.Y": [0]} +{"m1.X": [2], "m1.Y": [-1]} +{"m1.X": [4], "m1.Y": [-2]} +{"m1.X": [0,-4], "m1.Y": [0,2]} +{"m1.X": [0,-2], "m1.Y": [0,1]} +{"m1.X": [0,0], "m1.Y": [0,0]} +{"m1.X": [0,2], "m1.Y": [0,-1]} +{"m1.X": [0,4], "m1.Y": [0,-2]} diff --git a/testdata/constant_11.lisp b/testdata/constant_11.lisp new file mode 100644 index 00000000..8f7f0c52 --- /dev/null +++ b/testdata/constant_11.lisp @@ -0,0 +1,17 @@ +(defpurefun ((vanishes! :@loob) x) x) + +(module m1) +(defconst + ONE 1 + TWO 2 +) + +(defcolumns X Y) +(defconstraint c1 () (vanishes! (+ X (* TWO Y)))) +(defconstraint c2 () (vanishes! (+ (* TWO Y) X))) +(defconstraint c3 () (vanishes! (+ X Y Y))) +(defconstraint c4 () (vanishes! (+ Y X Y))) +(defconstraint c5 () (vanishes! (+ Y Y X))) +(defconstraint c6 () (vanishes! (+ (* ONE X) Y Y))) +(defconstraint c7 () (vanishes! (+ Y (* ONE X) Y))) +(defconstraint c8 () (vanishes! 
(+ Y Y (* ONE X)))) diff --git a/testdata/constant_11.rejects b/testdata/constant_11.rejects new file mode 100644 index 00000000..321d4fcc --- /dev/null +++ b/testdata/constant_11.rejects @@ -0,0 +1,7 @@ +{"m1.X": [-2], "m1.Y": [-2]} +{"m1.X": [-2], "m1.Y": [-1]} +{"m1.X": [-2], "m1.Y": [0]} +{"m1.X": [-2], "m1.Y": [2]} +{"m1.X": [-2], "m1.Y": [3]} +{"m1.X": [0,1], "m1.Y": [0,1]} +{"m1.X": [-2,1], "m1.Y": [1,1]} diff --git a/testdata/perspective_21.accepts b/testdata/perspective_21.accepts new file mode 100644 index 00000000..91390f74 --- /dev/null +++ b/testdata/perspective_21.accepts @@ -0,0 +1,31 @@ +{ "A": [], "P": [], "Q": [], "B_xor_C": [] } +;; +{ "A": [0], "P": [0], "Q": [0], "B_xor_C": [0] } +{ "A": [0], "P": [0], "Q": [0], "B_xor_C": [1] } +{ "A": [0], "P": [0], "Q": [0], "B_xor_C": [2] } +{ "A": [0], "P": [0], "Q": [0], "B_xor_C": [3] } +{ "A": [1], "P": [0], "Q": [0], "B_xor_C": [0] } +{ "A": [1], "P": [0], "Q": [0], "B_xor_C": [1] } +{ "A": [1], "P": [0], "Q": [0], "B_xor_C": [2] } +{ "A": [1], "P": [0], "Q": [0], "B_xor_C": [3] } +{ "A": [2], "P": [0], "Q": [0], "B_xor_C": [0] } +{ "A": [2], "P": [0], "Q": [0], "B_xor_C": [1] } +{ "A": [2], "P": [0], "Q": [0], "B_xor_C": [2] } +{ "A": [2], "P": [0], "Q": [0], "B_xor_C": [3] } +{ "A": [3], "P": [0], "Q": [0], "B_xor_C": [0] } +{ "A": [3], "P": [0], "Q": [0], "B_xor_C": [1] } +{ "A": [3], "P": [0], "Q": [0], "B_xor_C": [2] } +{ "A": [3], "P": [0], "Q": [0], "B_xor_C": [3] } +;; perspective p1 +{ "A": [0], "P": [1], "Q": [0], "B_xor_C": [0] } +{ "A": [1], "P": [1], "Q": [0], "B_xor_C": [1] } +{ "A": [2], "P": [1], "Q": [0], "B_xor_C": [2] } +{ "A": [3], "P": [1], "Q": [0], "B_xor_C": [3] } +;; perspective p2 +{ "A": [0], "P": [0], "Q": [1], "B_xor_C": [0] } +{ "A": [0], "P": [0], "Q": [1], "B_xor_C": [1] } +{ "A": [0], "P": [0], "Q": [1], "B_xor_C": [2] } +{ "A": [0], "P": [0], "Q": [1], "B_xor_C": [3] } +{ "A": [1], "P": [0], "Q": [1], "B_xor_C": [0] } +{ "A": [2], "P": [0], "Q": [1], "B_xor_C": [0] } 
+{ "A": [3], "P": [0], "Q": [1], "B_xor_C": [0] } diff --git a/testdata/perspective_21.lisp b/testdata/perspective_21.lisp new file mode 100644 index 00000000..af14b7ff --- /dev/null +++ b/testdata/perspective_21.lisp @@ -0,0 +1,17 @@ +(defpurefun ((vanishes! :@loob :force) e0) e0) +;; +(defcolumns + ;; Column (not in perspective) + A + ;; Selector column for perspective p1 + (P :binary@prove) + ;; Selector column for perspective p2 + (Q :binary@prove)) + +;; Section 1 +(defperspective p1 P ((B :binary))) +(defconstraint c1 (:perspective p1) (vanishes! (- A p1/B))) + +;; Section 2 +(defperspective p2 Q ((C :binary))) +(defconstraint c2 (:perspective p2) (vanishes! (* A p2/C))) diff --git a/testdata/perspective_21.rejects b/testdata/perspective_21.rejects new file mode 100644 index 00000000..f13e9dea --- /dev/null +++ b/testdata/perspective_21.rejects @@ -0,0 +1,21 @@ +{ "A": [0], "P": [1], "Q": [0], "B_xor_C": [1] } +{ "A": [0], "P": [1], "Q": [0], "B_xor_C": [2] } +{ "A": [0], "P": [1], "Q": [0], "B_xor_C": [3] } +{ "A": [1], "P": [1], "Q": [0], "B_xor_C": [2] } +{ "A": [1], "P": [1], "Q": [0], "B_xor_C": [3] } +{ "A": [1], "P": [1], "Q": [0], "B_xor_C": [0] } +{ "A": [2], "P": [1], "Q": [0], "B_xor_C": [0] } +{ "A": [2], "P": [1], "Q": [0], "B_xor_C": [1] } +{ "A": [2], "P": [1], "Q": [0], "B_xor_C": [3] } +{ "A": [3], "P": [1], "Q": [0], "B_xor_C": [0] } +{ "A": [3], "P": [1], "Q": [0], "B_xor_C": [1] } +{ "A": [3], "P": [1], "Q": [0], "B_xor_C": [2] } +{ "A": [1], "P": [0], "Q": [1], "B_xor_C": [1] } +{ "A": [1], "P": [0], "Q": [1], "B_xor_C": [2] } +{ "A": [1], "P": [0], "Q": [1], "B_xor_C": [3] } +{ "A": [2], "P": [0], "Q": [1], "B_xor_C": [1] } +{ "A": [2], "P": [0], "Q": [1], "B_xor_C": [2] } +{ "A": [2], "P": [0], "Q": [1], "B_xor_C": [3] } +{ "A": [3], "P": [0], "Q": [1], "B_xor_C": [1] } +{ "A": [3], "P": [0], "Q": [1], "B_xor_C": [2] } +{ "A": [3], "P": [0], "Q": [1], "B_xor_C": [3] } diff --git a/testdata/perspective_22.accepts 
b/testdata/perspective_22.accepts new file mode 100644 index 00000000..91390f74 --- /dev/null +++ b/testdata/perspective_22.accepts @@ -0,0 +1,31 @@ +{ "A": [], "P": [], "Q": [], "B_xor_C": [] } +;; +{ "A": [0], "P": [0], "Q": [0], "B_xor_C": [0] } +{ "A": [0], "P": [0], "Q": [0], "B_xor_C": [1] } +{ "A": [0], "P": [0], "Q": [0], "B_xor_C": [2] } +{ "A": [0], "P": [0], "Q": [0], "B_xor_C": [3] } +{ "A": [1], "P": [0], "Q": [0], "B_xor_C": [0] } +{ "A": [1], "P": [0], "Q": [0], "B_xor_C": [1] } +{ "A": [1], "P": [0], "Q": [0], "B_xor_C": [2] } +{ "A": [1], "P": [0], "Q": [0], "B_xor_C": [3] } +{ "A": [2], "P": [0], "Q": [0], "B_xor_C": [0] } +{ "A": [2], "P": [0], "Q": [0], "B_xor_C": [1] } +{ "A": [2], "P": [0], "Q": [0], "B_xor_C": [2] } +{ "A": [2], "P": [0], "Q": [0], "B_xor_C": [3] } +{ "A": [3], "P": [0], "Q": [0], "B_xor_C": [0] } +{ "A": [3], "P": [0], "Q": [0], "B_xor_C": [1] } +{ "A": [3], "P": [0], "Q": [0], "B_xor_C": [2] } +{ "A": [3], "P": [0], "Q": [0], "B_xor_C": [3] } +;; perspective p1 +{ "A": [0], "P": [1], "Q": [0], "B_xor_C": [0] } +{ "A": [1], "P": [1], "Q": [0], "B_xor_C": [1] } +{ "A": [2], "P": [1], "Q": [0], "B_xor_C": [2] } +{ "A": [3], "P": [1], "Q": [0], "B_xor_C": [3] } +;; perspective p2 +{ "A": [0], "P": [0], "Q": [1], "B_xor_C": [0] } +{ "A": [0], "P": [0], "Q": [1], "B_xor_C": [1] } +{ "A": [0], "P": [0], "Q": [1], "B_xor_C": [2] } +{ "A": [0], "P": [0], "Q": [1], "B_xor_C": [3] } +{ "A": [1], "P": [0], "Q": [1], "B_xor_C": [0] } +{ "A": [2], "P": [0], "Q": [1], "B_xor_C": [0] } +{ "A": [3], "P": [0], "Q": [1], "B_xor_C": [0] } diff --git a/testdata/perspective_22.lisp b/testdata/perspective_22.lisp new file mode 100644 index 00000000..290d296a --- /dev/null +++ b/testdata/perspective_22.lisp @@ -0,0 +1,17 @@ +(defpurefun ((vanishes! 
:@loob :force) e0) e0) +;; +(defcolumns + ;; Column (not in perspective) + A + ;; Selector column for perspective p1 + (P :binary@prove) + ;; Selector column for perspective p2 + (Q :binary@prove)) + +;; Section 1 +(defperspective p1 P ((B :binary))) +(defconstraint c1 (:perspective p1) (vanishes! (- A p2/C))) + +;; Section 2 +(defperspective p2 Q ((C :binary))) +(defconstraint c2 (:perspective p2) (vanishes! (* A p1/B))) diff --git a/testdata/perspective_22.rejects b/testdata/perspective_22.rejects new file mode 100644 index 00000000..f13e9dea --- /dev/null +++ b/testdata/perspective_22.rejects @@ -0,0 +1,21 @@ +{ "A": [0], "P": [1], "Q": [0], "B_xor_C": [1] } +{ "A": [0], "P": [1], "Q": [0], "B_xor_C": [2] } +{ "A": [0], "P": [1], "Q": [0], "B_xor_C": [3] } +{ "A": [1], "P": [1], "Q": [0], "B_xor_C": [2] } +{ "A": [1], "P": [1], "Q": [0], "B_xor_C": [3] } +{ "A": [1], "P": [1], "Q": [0], "B_xor_C": [0] } +{ "A": [2], "P": [1], "Q": [0], "B_xor_C": [0] } +{ "A": [2], "P": [1], "Q": [0], "B_xor_C": [1] } +{ "A": [2], "P": [1], "Q": [0], "B_xor_C": [3] } +{ "A": [3], "P": [1], "Q": [0], "B_xor_C": [0] } +{ "A": [3], "P": [1], "Q": [0], "B_xor_C": [1] } +{ "A": [3], "P": [1], "Q": [0], "B_xor_C": [2] } +{ "A": [1], "P": [0], "Q": [1], "B_xor_C": [1] } +{ "A": [1], "P": [0], "Q": [1], "B_xor_C": [2] } +{ "A": [1], "P": [0], "Q": [1], "B_xor_C": [3] } +{ "A": [2], "P": [0], "Q": [1], "B_xor_C": [1] } +{ "A": [2], "P": [0], "Q": [1], "B_xor_C": [2] } +{ "A": [2], "P": [0], "Q": [1], "B_xor_C": [3] } +{ "A": [3], "P": [0], "Q": [1], "B_xor_C": [1] } +{ "A": [3], "P": [0], "Q": [1], "B_xor_C": [2] } +{ "A": [3], "P": [0], "Q": [1], "B_xor_C": [3] } diff --git a/testdata/perspective_23.accepts b/testdata/perspective_23.accepts new file mode 100644 index 00000000..44bcda8e --- /dev/null +++ b/testdata/perspective_23.accepts @@ -0,0 +1,31 @@ +{ "A": [], "P": [], "Q": [], "B_xor_B": [] } +;; +{ "A": [0], "P": [0], "Q": [0], "B_xor_B": [0] } +{ "A": [0], "P": [0], "Q": [0], 
"B_xor_B": [1] } +{ "A": [0], "P": [0], "Q": [0], "B_xor_B": [2] } +{ "A": [0], "P": [0], "Q": [0], "B_xor_B": [3] } +{ "A": [1], "P": [0], "Q": [0], "B_xor_B": [0] } +{ "A": [1], "P": [0], "Q": [0], "B_xor_B": [1] } +{ "A": [1], "P": [0], "Q": [0], "B_xor_B": [2] } +{ "A": [1], "P": [0], "Q": [0], "B_xor_B": [3] } +{ "A": [2], "P": [0], "Q": [0], "B_xor_B": [0] } +{ "A": [2], "P": [0], "Q": [0], "B_xor_B": [1] } +{ "A": [2], "P": [0], "Q": [0], "B_xor_B": [2] } +{ "A": [2], "P": [0], "Q": [0], "B_xor_B": [3] } +{ "A": [3], "P": [0], "Q": [0], "B_xor_B": [0] } +{ "A": [3], "P": [0], "Q": [0], "B_xor_B": [1] } +{ "A": [3], "P": [0], "Q": [0], "B_xor_B": [2] } +{ "A": [3], "P": [0], "Q": [0], "B_xor_B": [3] } +;; perspective p1 +{ "A": [0], "P": [1], "Q": [0], "B_xor_B": [0] } +{ "A": [1], "P": [1], "Q": [0], "B_xor_B": [1] } +{ "A": [2], "P": [1], "Q": [0], "B_xor_B": [2] } +{ "A": [3], "P": [1], "Q": [0], "B_xor_B": [3] } +;; perspective p2 +{ "A": [0], "P": [0], "Q": [1], "B_xor_B": [0] } +{ "A": [0], "P": [0], "Q": [1], "B_xor_B": [1] } +{ "A": [0], "P": [0], "Q": [1], "B_xor_B": [2] } +{ "A": [0], "P": [0], "Q": [1], "B_xor_B": [3] } +{ "A": [1], "P": [0], "Q": [1], "B_xor_B": [0] } +{ "A": [2], "P": [0], "Q": [1], "B_xor_B": [0] } +{ "A": [3], "P": [0], "Q": [1], "B_xor_B": [0] } diff --git a/testdata/perspective_23.lisp b/testdata/perspective_23.lisp new file mode 100644 index 00000000..6e8e12c0 --- /dev/null +++ b/testdata/perspective_23.lisp @@ -0,0 +1,17 @@ +(defpurefun ((vanishes! :@loob :force) e0) e0) +;; +(defcolumns + ;; Column (not in perspective) + A + ;; Selector column for perspective p1 + (P :binary@prove) + ;; Selector column for perspective p2 + (Q :binary@prove)) + +;; Section 1 +(defperspective p1 P ((B :binary))) +(defconstraint c1 (:perspective p1) (vanishes! (- A B))) + +;; Section 2 +(defperspective p2 Q ((B :binary))) +(defconstraint c2 (:perspective p2) (vanishes! 
(* A B))) diff --git a/testdata/perspective_23.rejects b/testdata/perspective_23.rejects new file mode 100644 index 00000000..49126192 --- /dev/null +++ b/testdata/perspective_23.rejects @@ -0,0 +1,21 @@ +{ "A": [0], "P": [1], "Q": [0], "B_xor_B": [1] } +{ "A": [0], "P": [1], "Q": [0], "B_xor_B": [2] } +{ "A": [0], "P": [1], "Q": [0], "B_xor_B": [3] } +{ "A": [1], "P": [1], "Q": [0], "B_xor_B": [2] } +{ "A": [1], "P": [1], "Q": [0], "B_xor_B": [3] } +{ "A": [1], "P": [1], "Q": [0], "B_xor_B": [0] } +{ "A": [2], "P": [1], "Q": [0], "B_xor_B": [0] } +{ "A": [2], "P": [1], "Q": [0], "B_xor_B": [1] } +{ "A": [2], "P": [1], "Q": [0], "B_xor_B": [3] } +{ "A": [3], "P": [1], "Q": [0], "B_xor_B": [0] } +{ "A": [3], "P": [1], "Q": [0], "B_xor_B": [1] } +{ "A": [3], "P": [1], "Q": [0], "B_xor_B": [2] } +{ "A": [1], "P": [0], "Q": [1], "B_xor_B": [1] } +{ "A": [1], "P": [0], "Q": [1], "B_xor_B": [2] } +{ "A": [1], "P": [0], "Q": [1], "B_xor_B": [3] } +{ "A": [2], "P": [0], "Q": [1], "B_xor_B": [1] } +{ "A": [2], "P": [0], "Q": [1], "B_xor_B": [2] } +{ "A": [2], "P": [0], "Q": [1], "B_xor_B": [3] } +{ "A": [3], "P": [0], "Q": [1], "B_xor_B": [1] } +{ "A": [3], "P": [0], "Q": [1], "B_xor_B": [2] } +{ "A": [3], "P": [0], "Q": [1], "B_xor_B": [3] } diff --git a/testdata/perspective_24.accepts b/testdata/perspective_24.accepts new file mode 100644 index 00000000..22753f7d --- /dev/null +++ b/testdata/perspective_24.accepts @@ -0,0 +1,219 @@ +{ "P": [], "Q": [], "B_1_xor_C_1": [], "B_2_xor_C_2": [] } +;; +{ "P": [0], "Q": [0], "B_1_xor_C_1": [0], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [1], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [2], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [3], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [256], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [423], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [551], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [0], 
"B_1_xor_C_1": [892], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [1011], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [2022], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [4483], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [8919], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [10101], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [16868], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [32378], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65534], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65535], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65536], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [0], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [1], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [2], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [3], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [256], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [423], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [551], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [892], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [1011], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [2022], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [4483], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [8919], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [10101], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [16868], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [32378], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65534], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65535], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65536], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [0], "B_2_xor_C_2": [255] } +{ "P": [0], "Q": 
[0], "B_1_xor_C_1": [1], "B_2_xor_C_2": [255] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [2], "B_2_xor_C_2": [255] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [3], "B_2_xor_C_2": [255] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [256], "B_2_xor_C_2": [255] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [423], "B_2_xor_C_2": [255] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [551], "B_2_xor_C_2": [255] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [892], "B_2_xor_C_2": [255] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [1011], "B_2_xor_C_2": [255] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [2022], "B_2_xor_C_2": [255] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [4483], "B_2_xor_C_2": [255] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [8919], "B_2_xor_C_2": [255] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [10101], "B_2_xor_C_2": [255] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [16868], "B_2_xor_C_2": [255] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [32378], "B_2_xor_C_2": [255] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65534], "B_2_xor_C_2": [255] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65535], "B_2_xor_C_2": [255] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65536], "B_2_xor_C_2": [255] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [0], "B_2_xor_C_2": [256] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [1], "B_2_xor_C_2": [256] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [2], "B_2_xor_C_2": [256] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [3], "B_2_xor_C_2": [256] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [256], "B_2_xor_C_2": [256] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [423], "B_2_xor_C_2": [256] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [551], "B_2_xor_C_2": [256] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [892], "B_2_xor_C_2": [256] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [1011], "B_2_xor_C_2": [256] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [2022], "B_2_xor_C_2": [256] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [4483], "B_2_xor_C_2": [256] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [8919], "B_2_xor_C_2": [256] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": 
[10101], "B_2_xor_C_2": [256] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [16868], "B_2_xor_C_2": [256] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [32378], "B_2_xor_C_2": [256] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65534], "B_2_xor_C_2": [256] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65535], "B_2_xor_C_2": [256] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65536], "B_2_xor_C_2": [256] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [0], "B_2_xor_C_2": [444] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [1], "B_2_xor_C_2": [444] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [2], "B_2_xor_C_2": [444] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [3], "B_2_xor_C_2": [444] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [256], "B_2_xor_C_2": [444] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [423], "B_2_xor_C_2": [444] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [551], "B_2_xor_C_2": [444] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [892], "B_2_xor_C_2": [444] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [1011], "B_2_xor_C_2": [444] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [2022], "B_2_xor_C_2": [444] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [4483], "B_2_xor_C_2": [444] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [8919], "B_2_xor_C_2": [444] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [10101], "B_2_xor_C_2": [444] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [16868], "B_2_xor_C_2": [444] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [32378], "B_2_xor_C_2": [444] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65534], "B_2_xor_C_2": [444] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65535], "B_2_xor_C_2": [444] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65536], "B_2_xor_C_2": [444] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [0], "B_2_xor_C_2": [1023] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [1], "B_2_xor_C_2": [1023] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [2], "B_2_xor_C_2": [1023] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [3], "B_2_xor_C_2": [1023] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [256], "B_2_xor_C_2": [1023] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [423], 
"B_2_xor_C_2": [1023] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [551], "B_2_xor_C_2": [1023] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [892], "B_2_xor_C_2": [1023] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [1011], "B_2_xor_C_2": [1023] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [2022], "B_2_xor_C_2": [1023] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [4483], "B_2_xor_C_2": [1023] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [8919], "B_2_xor_C_2": [1023] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [10101], "B_2_xor_C_2": [1023] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [16868], "B_2_xor_C_2": [1023] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [32378], "B_2_xor_C_2": [1023] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65534], "B_2_xor_C_2": [1023] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65535], "B_2_xor_C_2": [1023] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65536], "B_2_xor_C_2": [1023] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [0], "B_2_xor_C_2": [16385] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [1], "B_2_xor_C_2": [16385] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [2], "B_2_xor_C_2": [16385] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [3], "B_2_xor_C_2": [16385] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [256], "B_2_xor_C_2": [16385] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [423], "B_2_xor_C_2": [16385] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [551], "B_2_xor_C_2": [16385] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [892], "B_2_xor_C_2": [16385] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [1011], "B_2_xor_C_2": [16385] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [2022], "B_2_xor_C_2": [16385] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [4483], "B_2_xor_C_2": [16385] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [8919], "B_2_xor_C_2": [16385] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [10101], "B_2_xor_C_2": [16385] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [16868], "B_2_xor_C_2": [16385] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [32378], "B_2_xor_C_2": [16385] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65534], "B_2_xor_C_2": [16385] } +{ "P": 
[0], "Q": [0], "B_1_xor_C_1": [65535], "B_2_xor_C_2": [16385] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65536], "B_2_xor_C_2": [16385] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [0], "B_2_xor_C_2": [65535] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [1], "B_2_xor_C_2": [65535] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [2], "B_2_xor_C_2": [65535] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [3], "B_2_xor_C_2": [65535] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [256], "B_2_xor_C_2": [65535] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [423], "B_2_xor_C_2": [65535] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [551], "B_2_xor_C_2": [65535] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [892], "B_2_xor_C_2": [65535] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [1011], "B_2_xor_C_2": [65535] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [2022], "B_2_xor_C_2": [65535] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [4483], "B_2_xor_C_2": [65535] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [8919], "B_2_xor_C_2": [65535] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [10101], "B_2_xor_C_2": [65535] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [16868], "B_2_xor_C_2": [65535] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [32378], "B_2_xor_C_2": [65535] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65534], "B_2_xor_C_2": [65535] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65535], "B_2_xor_C_2": [65535] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65536], "B_2_xor_C_2": [65535] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [0], "B_2_xor_C_2": [65536] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [1], "B_2_xor_C_2": [65536] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [2], "B_2_xor_C_2": [65536] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [3], "B_2_xor_C_2": [65536] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [256], "B_2_xor_C_2": [65536] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [423], "B_2_xor_C_2": [65536] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [551], "B_2_xor_C_2": [65536] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [892], "B_2_xor_C_2": [65536] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": 
[1011], "B_2_xor_C_2": [65536] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [2022], "B_2_xor_C_2": [65536] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [4483], "B_2_xor_C_2": [65536] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [8919], "B_2_xor_C_2": [65536] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [10101], "B_2_xor_C_2": [65536] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [16868], "B_2_xor_C_2": [65536] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [32378], "B_2_xor_C_2": [65536] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65534], "B_2_xor_C_2": [65536] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65535], "B_2_xor_C_2": [65536] } +{ "P": [0], "Q": [0], "B_1_xor_C_1": [65536], "B_2_xor_C_2": [65536] } +;; perspective p1 +{ "P": [1], "Q": [0], "B_1_xor_C_1": [0], "B_2_xor_C_2": [0] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [1], "B_2_xor_C_2": [1] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [2], "B_2_xor_C_2": [2] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [3], "B_2_xor_C_2": [3] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [256], "B_2_xor_C_2": [256] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [423], "B_2_xor_C_2": [423] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [551], "B_2_xor_C_2": [551] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [892], "B_2_xor_C_2": [892] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [1011], "B_2_xor_C_2": [1011] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [2022], "B_2_xor_C_2": [2022] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [4483], "B_2_xor_C_2": [4483] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [8919], "B_2_xor_C_2": [8919] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [10101], "B_2_xor_C_2": [10101] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [16868], "B_2_xor_C_2": [16868] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [32378], "B_2_xor_C_2": [32378] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [65534], "B_2_xor_C_2": [65534] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [65535], "B_2_xor_C_2": [65535] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [65536], "B_2_xor_C_2": [65536] } +;; perspective p2 +{ "P": [0], "Q": [1], "B_1_xor_C_1": [0], 
"B_2_xor_C_2": [0] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [1], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [0], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [2], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [0], "B_2_xor_C_2": [2] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [3], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [0], "B_2_xor_C_2": [3] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [256], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [0], "B_2_xor_C_2": [256] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [423], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [0], "B_2_xor_C_2": [423] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [551], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [0], "B_2_xor_C_2": [551] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [892], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [0], "B_2_xor_C_2": [892] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [1011], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [0], "B_2_xor_C_2": [1011] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [2022], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [0], "B_2_xor_C_2": [2022] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [4483], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [0], "B_2_xor_C_2": [4483] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [8919], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [0], "B_2_xor_C_2": [8919] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [10101], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [0], "B_2_xor_C_2": [10101] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [16868], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [0], "B_2_xor_C_2": [16868] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [32378], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [0], "B_2_xor_C_2": [32378] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [65534], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [0], 
"B_2_xor_C_2": [65534] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [65535], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [0], "B_2_xor_C_2": [65535] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [65536], "B_2_xor_C_2": [0] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [0], "B_2_xor_C_2": [65536] } diff --git a/testdata/perspective_24.lisp b/testdata/perspective_24.lisp new file mode 100644 index 00000000..03ec18c1 --- /dev/null +++ b/testdata/perspective_24.lisp @@ -0,0 +1,15 @@ +(defpurefun ((vanishes! :@loob :force) e0) e0) +;; +(defcolumns + ;; Selector column for perspective p1 + (P :binary@prove) + ;; Selector column for perspective p2 + (Q :binary@prove)) + +;; Section 1 +(defperspective p1 P ((B :binary :array [2]))) +(defconstraint c1 (:perspective p1) (vanishes! (- [B 1] [B 2]))) + +;; Section 2 +(defperspective p2 Q ((C :binary :array [2]))) +(defconstraint c2 (:perspective p2) (vanishes! (* [C 1] [C 2]))) diff --git a/testdata/perspective_24.rejects b/testdata/perspective_24.rejects new file mode 100644 index 00000000..a85e1606 --- /dev/null +++ b/testdata/perspective_24.rejects @@ -0,0 +1,54 @@ +;; perspective p1 +{ "P": [1], "Q": [0], "B_1_xor_C_1": [0], "B_2_xor_C_2": [1] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [0], "B_2_xor_C_2": [1] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [1], "B_2_xor_C_2": [2] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [2], "B_2_xor_C_2": [3] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [226], "B_2_xor_C_2": [256] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [42], "B_2_xor_C_2": [423] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [5511], "B_2_xor_C_2": [551] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [8342], "B_2_xor_C_2": [892] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [1111], "B_2_xor_C_2": [1011] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [20222], "B_2_xor_C_2": [2022] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [448], "B_2_xor_C_2": [4483] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [89], "B_2_xor_C_2": [8919] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [11], 
"B_2_xor_C_2": [10101] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [1668], "B_2_xor_C_2": [16868] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [33378], "B_2_xor_C_2": [32378] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [63534], "B_2_xor_C_2": [65534] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [65435], "B_2_xor_C_2": [65535] } +{ "P": [1], "Q": [0], "B_1_xor_C_1": [65539], "B_2_xor_C_2": [65536] } +;; perspective p2 +{ "P": [0], "Q": [1], "B_1_xor_C_1": [1], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [2], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [2], "B_2_xor_C_2": [3] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [4], "B_2_xor_C_2": [2] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [3], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [2], "B_2_xor_C_2": [3] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [256], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [255], "B_2_xor_C_2": [256] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [423], "B_2_xor_C_2": [256] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [1], "B_2_xor_C_2": [423] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [551], "B_2_xor_C_2": [233] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [8192], "B_2_xor_C_2": [551] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [892], "B_2_xor_C_2": [12] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [9812], "B_2_xor_C_2": [892] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [1011], "B_2_xor_C_2": [10] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [901], "B_2_xor_C_2": [1011] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [2022], "B_2_xor_C_2": [121] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [2], "B_2_xor_C_2": [2022] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [4483], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [9999], "B_2_xor_C_2": [4483] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [8919], "B_2_xor_C_2": [12] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [897], "B_2_xor_C_2": [8919] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [10101], "B_2_xor_C_2": [123] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [98], 
"B_2_xor_C_2": [10101] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [16868], "B_2_xor_C_2": [9] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [1], "B_2_xor_C_2": [16868] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [32378], "B_2_xor_C_2": [65535] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [65536], "B_2_xor_C_2": [32378] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [65534], "B_2_xor_C_2": [3278] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [255], "B_2_xor_C_2": [65534] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [65535], "B_2_xor_C_2": [256] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [101], "B_2_xor_C_2": [65535] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [65536], "B_2_xor_C_2": [1] } +{ "P": [0], "Q": [1], "B_1_xor_C_1": [1], "B_2_xor_C_2": [65536] } diff --git a/testdata/perspective_invalid_04.lisp b/testdata/perspective_25.lisp similarity index 100% rename from testdata/perspective_invalid_04.lisp rename to testdata/perspective_25.lisp diff --git a/testdata/perspective_invalid_02.lisp b/testdata/perspective_invalid_02.lisp index 09e2d02c..5bf3d49a 100644 --- a/testdata/perspective_invalid_02.lisp +++ b/testdata/perspective_invalid_02.lisp @@ -1,5 +1,5 @@ -;;error:15:53-54:not visible here -;;error:16:53-54:not visible here +;;error:15:53-54:unknown symbol +;;error:16:53-54:unknown symbol (defpurefun ((vanishes! :@loob :force) e0) e0) ;; (defcolumns diff --git a/testdata/perspective_invalid_03.lisp b/testdata/perspective_invalid_03.lisp index 15aeeac5..56e7abaa 100644 --- a/testdata/perspective_invalid_03.lisp +++ b/testdata/perspective_invalid_03.lisp @@ -1,5 +1,5 @@ -;;error:15:38-39:not visible here -;;error:16:38-39:not visible here +;;error:15:38-39:unknown symbol +;;error:16:38-39:unknown symbol (defpurefun ((vanishes! 
:@loob :force) e0) e0) ;; (defcolumns diff --git a/testdata/perspective_invalid_06.lisp b/testdata/perspective_invalid_06.lisp index e6987a1b..a940f4e7 100644 --- a/testdata/perspective_invalid_06.lisp +++ b/testdata/perspective_invalid_06.lisp @@ -1,5 +1,5 @@ -;;error:14:20-21:not visible here -;;error:15:16-17:not visible here +;;error:14:20-21:unknown symbol +;;error:15:16-17:unknown symbol (defpurefun ((vanishes! :@loob :force) e0) e0) ;; (defcolumns diff --git a/testdata/perspective_invalid_08.lisp b/testdata/perspective_invalid_08.lisp new file mode 100644 index 00000000..5fe4a310 --- /dev/null +++ b/testdata/perspective_invalid_08.lisp @@ -0,0 +1,17 @@ +;;error:14:53-57:unknown symbol +(defpurefun ((vanishes! :@loob :force) e0) e0) +;; +(defcolumns + ;; Column (not in perspective) + A + ;; Selector column for perspective p1 + (P :binary@prove) + ;; Selector column for perspective p2 + (Q :binary@prove)) + +;; Section 1 +(defperspective p1 P ((B :binary))) +(defconstraint c1 (:perspective p1) (vanishes! (- A p2/B))) + +;; Section 2 +(defperspective p2 Q ((C :binary)))