From 2e58e496cb30950957408b53df75c465eeada330 Mon Sep 17 00:00:00 2001 From: Sergio Medina Date: Mon, 16 Dec 2024 15:22:18 +0000 Subject: [PATCH] feat: change MIR representation to protobuf --- nada_dsl/ast_util.py | 388 +++++++++--------- nada_dsl/compile.py | 4 +- nada_dsl/compiler_frontend.py | 329 +++++++++------ nada_dsl/future/operations.py | 8 +- nada_dsl/nada_types/collections.py | 132 +++--- nada_dsl/nada_types/function.py | 44 +- nada_dsl/nada_types/scalar_types.py | 30 +- nada_dsl/operations.py | 73 +++- nada_dsl/program_io.py | 11 +- nada_dsl/source_ref.py | 19 +- nada_mir/MANIFEST.in | 8 + nada_mir/proto/nillion/nada/v1/mir.proto | 114 +++++ .../proto/nillion/nada/v1/operations.proto | 215 ++++++++++ nada_mir/proto/nillion/nada/v1/types.proto | 56 +++ nada_mir/pyproject.toml | 11 +- nada_mir/scripts/gen_proto.sh | 9 +- .../nillion/nada/mir/v1/__init__.py | 50 +-- .../nillion/nada/operations/v1/__init__.py | 286 ++++++------- .../nillion/nada/types/v1/__init__.py | 100 ++--- pyproject.toml | 2 + tests/compile_test.py | 117 +++--- tests/compiler_frontend_test.py | 342 ++++++++------- uv.lock | 115 +++++- 23 files changed, 1545 insertions(+), 918 deletions(-) create mode 100644 nada_mir/MANIFEST.in create mode 100644 nada_mir/proto/nillion/nada/v1/mir.proto create mode 100644 nada_mir/proto/nillion/nada/v1/operations.proto create mode 100644 nada_mir/proto/nillion/nada/v1/types.proto diff --git a/nada_dsl/ast_util.py b/nada_dsl/ast_util.py index 93915f2..6a05d1a 100644 --- a/nada_dsl/ast_util.py +++ b/nada_dsl/ast_util.py @@ -1,21 +1,33 @@ """AST utilities.""" -from abc import ABC +from abc import ABC, abstractmethod from dataclasses import dataclass import hashlib from typing import Dict, List from sortedcontainers import SortedDict -from nada_dsl.nada_types import DslTypeRepr, Party -from nada_dsl.source_ref import SourceRef +from betterproto.lib.google.protobuf import Empty + +from nada_mir_proto.nillion.nada.operations import v1 as proto_op +from nada_mir_proto.nillion.nada.types import v1 as proto_ty +from nada_mir_proto.nillion.nada.mir import v1 as proto_mir -OPERATION_ID_COUNTER = 0 +from nada_dsl.nada_types import Party +from nada_dsl.source_ref import SourceRef -def next_operation_id() -> int: - """Returns the next value of the operation id counter.""" - global OPERATION_ID_COUNTER - OPERATION_ID_COUNTER += 1 - return OPERATION_ID_COUNTER +class OperationId: + """Operation identifier generator.""" + current = 0 + + @classmethod + def next(cls): + next_op_id = cls.current + cls.current += 1 + return next_op_id + + @classmethod + def reset(cls): + cls.current = 0 @dataclass @@ -41,15 +53,17 @@ class ASTOperation(ABC): id: int source_ref: SourceRef - ty: DslTypeRepr + ty: proto_ty.NadaType + @abstractmethod def child_operations(self) -> List[int]: """Returns the list of identifiers of all the child operations of this operation.""" - return [] + raise NotImplementedError("Operation should implement child_operations method") - def to_mir(self): + @abstractmethod + def to_mir(self) -> proto_op.Operation: """Converts this AST Operation into a valid MIR data structure""" - return {} + raise NotImplementedError("Operation should implement to_mir method") # Map of operations identified by the Python compiler @@ -64,44 +78,46 @@ def to_mir(self): class BinaryASTOperation(ASTOperation): """Superclass of all the Binary operations in AST representation""" - name: str + variant: proto_op.BinaryOperationVariant left: int right: int def child_operations(self) -> List[int]: return 
[self.left, self.right] - def to_mir(self): - return { - self.name: { - "id": self.id, - "left": self.left, - "right": self.right, - "type": self.ty, - "source_ref_index": self.source_ref.to_index(), - } - } + def to_mir(self) -> proto_op.Operation: + return proto_op.Operation( + id=self.id, + type=self.ty, + source_ref_index=self.source_ref.to_index(), + binary=proto_op.BinaryOperation( + variant=self.variant, + left=self.left, + right=self.right, + ), + ) @dataclass class UnaryASTOperation(ASTOperation): """Superclass of all the unary operations in AST representation""" - name: str + variant: proto_op.UnaryOperationVariant child: int def child_operations(self): return [self.child] - def to_mir(self): - return { - self.name: { - "id": self.id, - "this": self.child, - "type": self.ty, - "source_ref_index": self.source_ref.to_index(), - } - } + def to_mir(self) -> proto_op.Operation: + return proto_op.Operation( + id=self.id, + type=self.ty, + source_ref_index=self.source_ref.to_index(), + unary=proto_op.UnaryOperation( + variant=self.variant, + this=self.child, + ), + ) @dataclass @@ -115,17 +131,17 @@ class IfElseASTOperation(ASTOperation): def child_operations(self): return [self.condition, self.true_branch_child, self.false_branch_child] - def to_mir(self): - return { - "IfElse": { - "id": self.id, - "this": self.condition, - "arg_0": self.true_branch_child, - "arg_1": self.false_branch_child, - "type": self.ty, - "source_ref_index": self.source_ref.to_index(), - } - } + def to_mir(self) -> proto_op.Operation: + return proto_op.Operation( + id=self.id, + type=self.ty, + source_ref_index=self.source_ref.to_index(), + ifelse=proto_op.IfElseOperation( + cond=self.condition, + first=self.true_branch_child, + second=self.false_branch_child, + ), + ) @dataclass @@ -135,14 +151,13 @@ class RandomASTOperation(ASTOperation): def child_operations(self): return [] - def to_mir(self): - return { - "Random": { - "id": self.id, - "type": self.ty, - "source_ref_index": self.source_ref.to_index(), - } - } + def to_mir(self) -> proto_op.Operation: + return proto_op.Operation( + id=self.id, + type=self.ty, + source_ref_index=self.source_ref.to_index(), + random=Empty(), + ) @dataclass @@ -153,15 +168,18 @@ class InputASTOperation(ASTOperation): party: Party doc: str - def to_mir(self): - return { - "InputReference": { - "id": self.id, - "refers_to": self.name, - "type": self.ty, - "source_ref_index": self.source_ref.to_index(), - } - } + def to_mir(self) -> proto_op.Operation: + return proto_op.Operation( + id=self.id, + type=self.ty, + source_ref_index=self.source_ref.to_index(), + input_ref=proto_op.InputReference( + refers_to=self.name, + ), + ) + + def child_operations(self) -> List[int]: + return [] @dataclass @@ -199,15 +217,18 @@ def __init__( super().__init__(id=self.id, source_ref=self.source_ref, ty=self.ty) - def to_mir(self): - return { - "LiteralReference": { - "id": self.id, - "refers_to": self.literal_index, - "type": self.ty, - "source_ref_index": self.source_ref.to_index(), - } - } + def to_mir(self) -> proto_op.Operation: + return proto_op.Operation( + id=self.id, + type=self.ty, + source_ref_index=self.source_ref.to_index(), + literal_ref=proto_op.LiteralReference( + refers_to=self.literal_index, + ), + ) + + def child_operations(self) -> List[int]: + return [] @dataclass @@ -221,17 +242,17 @@ class ReduceASTOperation(ASTOperation): def child_operations(self): return [self.child, self.initial] - def to_mir(self): - return { - "Reduce": { - "id": self.id, - "fn": self.fn, - "inner": 
self.child, - "initial": self.initial, - "type": self.ty, - "source_ref_index": self.source_ref.to_index(), - } - } + def to_mir(self) -> proto_op.Operation: + return proto_op.Operation( + id=self.id, + type=self.ty, + source_ref_index=self.source_ref.to_index(), + reduce=proto_op.ReduceOperation( + fn=self.fn, + child=self.child, + initial=self.initial, + ), + ) @dataclass @@ -244,16 +265,16 @@ class MapASTOperation(ASTOperation): def child_operations(self): return [self.child] - def to_mir(self): - return { - "Map": { - "id": self.id, - "fn": self.fn, - "inner": self.child, - "type": self.ty, - "source_ref_index": self.source_ref.to_index(), - } - } + def to_mir(self) -> proto_op.Operation: + return proto_op.Operation( + id=self.id, + type=self.ty, + source_ref_index=self.source_ref.to_index(), + map=proto_op.MapOperation( + fn=self.fn, + child=self.child, + ), + ) @dataclass @@ -266,38 +287,15 @@ class NewASTOperation(ASTOperation): def child_operations(self): return self.elements - def to_mir(self): - return { - "New": { - "id": self.id, - "elements": self.elements, - "type": self.ty, - "source_ref_index": self.source_ref.to_index(), - } - } - - -@dataclass -class NadaFunctionCallASTOperation(ASTOperation): - """AST representation of a NadaFunctionCall operation.""" - - args: List[int] - fn: int - - def child_operations(self): - return self.args - - def to_mir(self): - return { - "NadaFunctionCall": { - "id": self.id, - "function_id": self.fn, - "args": self.args, - "type": self.ty, - "source_ref_index": self.source_ref.to_index(), - "return_type": self.ty, - } - } + def to_mir(self) -> proto_op.Operation: + return proto_op.Operation( + id=self.id, + type=self.ty, + source_ref_index=self.source_ref.to_index(), + new=proto_op.NewOperation( + elements=self.elements, + ), + ) @dataclass @@ -307,16 +305,19 @@ class NadaFunctionArgASTOperation(ASTOperation): name: str fn: int - def to_mir(self): - return { - "NadaFunctionArgRef": { - "id": self.id, - "function_id": self.fn, - "refers_to": self.name, - "type": self.ty, - "source_ref_index": self.source_ref.to_index(), - } - } + def to_mir(self) -> proto_op.Operation: + return proto_op.Operation( + id=self.id, + type=self.ty, + source_ref_index=self.source_ref.to_index(), + arg_ref=proto_op.NadaFunctionArgRef( + function_id=self.fn, + refers_to=self.name, + ) + ) + + def child_operations(self) -> List[int]: + return [] @dataclass @@ -330,30 +331,30 @@ class NadaFunctionASTOperation(ASTOperation): # pylint: disable=arguments-differ def to_mir(self, operations): """Convert a function to MIR.""" - arg_operations: List[NadaFunctionArgASTOperation] = [ - AST_OPERATIONS[arg] for arg in self.args + args: List[proto_mir.NadaFunctionArg] = [ + proto_mir.NadaFunctionArg( + name=AST_OPERATIONS[arg].name, + type=AST_OPERATIONS[arg].ty, + source_ref_index=AST_OPERATIONS[arg].source_ref.to_index(), + ) + for arg in self.args ] # type: ignore - - return { - "id": self.id, - "args": [ - { - "name": arg.name, - "type": arg.ty, - "source_ref_index": arg.source_ref.to_index(), - } - for arg in arg_operations - ], - "function": self.name, - "return_operation_id": self.child, - "operations": operations, - "return_type": self.ty, - "source_ref_index": self.source_ref.to_index(), - } + return proto_mir.NadaFunction( + id=self.id, + args=args, + name=self.name, + return_operation_id=self.child, + operations=operations, + return_type=self.ty, + source_ref_index=self.source_ref.to_index(), + ) def __hash__(self) -> int: return self.id + def child_operations(self) -> 
List[int]: + return self.args + [self.child] + # Partially implemented @dataclass @@ -365,16 +366,16 @@ class CastASTOperation(ASTOperation): def child_operations(self): return [self.target] - def to_mir(self): - return { - "Cast": { - "id": self.id, - "target": self.target, - "to": self.ty, - "type": self.ty, - "source_ref_index": self.source_ref.to_index(), - } - } + def to_mir(self) -> proto_op.Operation: + return proto_op.Operation( + id=self.id, + type=self.ty, + source_ref_index=self.source_ref.to_index(), + cast=proto_op.CastOperation( + target=self.target, + cast_to=self.ty, + ), + ) @dataclass @@ -387,16 +388,17 @@ class TupleAccessorASTOperation(ASTOperation): def child_operations(self): return [self.source] - def to_mir(self): - return { - "TupleAccessor": { - "id": self.id, - "index": self.index, - "source": self.source, - "type": self.ty, - "source_ref_index": self.source_ref.to_index(), - } - } + def to_mir(self) -> proto_op.Operation: + return proto_op.Operation( + id=self.id, + type=self.ty, + source_ref_index=self.source_ref.to_index(), + tuple_accessor=proto_op.TupleAccessor( + index=proto_op.TupleIndex.LEFT + if self.index == 0 + else proto_op.TupleIndex.RIGHT, + ), + ) @dataclass @@ -409,16 +411,16 @@ class NTupleAccessorASTOperation(ASTOperation): def child_operations(self): return [self.source] - def to_mir(self): - return { - "NTupleAccessor": { - "id": self.id, - "index": self.index, - "source": self.source, - "type": self.ty, - "source_ref_index": self.source_ref.to_index(), - } - } + def to_mir(self) -> proto_op.Operation: + return proto_op.Operation( + id=self.id, + type=self.ty, + source_ref_index=self.source_ref.to_index(), + ntuple_accessor=proto_op.NtupleAccessor( + index=self.index, + source=self.source, + ), + ) @dataclass @@ -431,13 +433,13 @@ class ObjectAccessorASTOperation(ASTOperation): def child_operations(self): return [self.source] - def to_mir(self): - return { - "ObjectAccessor": { - "id": self.id, - "key": self.key, - "source": self.source, - "type": self.ty, - "source_ref_index": self.source_ref.to_index(), - } - } + def to_mir(self) -> proto_op.Operation: + return proto_op.Operation( + id=self.id, + type=self.ty, + source_ref_index=self.source_ref.to_index(), + object_accessor=proto_op.ObjectAccessor( + key=self.key, + source=self.source, + ), + ) diff --git a/nada_dsl/compile.py b/nada_dsl/compile.py index 216ab4b..79a2bb4 100644 --- a/nada_dsl/compile.py +++ b/nada_dsl/compile.py @@ -18,7 +18,7 @@ class CompilerOutput: """Compiler Output""" - mir: str + mir: bytes @add_timer(timer_name="nada_dsl.compile.compile") @@ -82,7 +82,7 @@ def print_output(out: CompilerOutput): """ output_json = { "result": "Success", - "mir": out.mir, + "mir": list(out.mir), } print(json.dumps(output_json)) diff --git a/nada_dsl/compiler_frontend.py b/nada_dsl/compiler_frontend.py index f8c5b41..c19a92d 100644 --- a/nada_dsl/compiler_frontend.py +++ b/nada_dsl/compiler_frontend.py @@ -3,14 +3,16 @@ that constitute the Nada embedded domain-specific language (EDSL). 
""" -from dataclasses import dataclass -import json +from dataclasses import dataclass, field import os -from json import JSONEncoder -import inspect -from typing import List, Dict, Any, Optional, Tuple +from typing import List, Dict, Optional, Tuple from sortedcontainers import SortedDict +from nada_dsl import Party +from nada_mir_proto.nillion.nada.mir import v1 as proto_mir +from nada_mir_proto.nillion.nada.operations import v1 as proto_op +from nada_mir_proto.nillion.nada.types import v1 as proto_ty + from nada_dsl.ast_util import ( AST_OPERATIONS, ASTOperation, @@ -24,7 +26,6 @@ NTupleAccessorASTOperation, NadaFunctionASTOperation, NadaFunctionArgASTOperation, - NadaFunctionCallASTOperation, NewASTOperation, ObjectAccessorASTOperation, RandomASTOperation, @@ -35,19 +36,12 @@ from nada_dsl.source_ref import SourceRef from nada_dsl.program_io import Output -INPUTS = SortedDict() -PARTIES = SortedDict() -FUNCTIONS: Dict[int, NadaFunctionASTOperation] = {} -LITERALS: Dict[str, Tuple[str, object]] = {} - - -class ClassEncoder(JSONEncoder): - """Custom JSON encoder for classes.""" - - def default(self, o): - if inspect.isclass(o): - return o.__name__ - return {type(o).__name__: o.__dict__} +@dataclass +class CompilationContext: + inputs: Dict[Tuple[str, str], InputASTOperation] = field(default_factory=lambda: SortedDict()) + parties: Dict[str, Party] = field(default_factory=lambda: SortedDict()) + functions: Dict[int, NadaFunctionASTOperation] = field(default_factory=lambda: {}) + literals: Dict[str, Tuple[str, proto_ty.NadaType]] = field(default_factory=lambda: {}) def get_target_dir() -> str: @@ -66,102 +60,100 @@ def get_target_dir() -> str: return os.path.join(cwd, "target") -def nada_compile(outputs: List[Output]) -> str: +def nada_compile(outputs: List[Output]) -> bytes: """Compile Nada to MIR and dump it as JSON.""" compiled = nada_dsl_to_nada_mir(outputs) - return json.dumps(compiled) + return bytes(compiled) -def nada_dsl_to_nada_mir(outputs: List[Output]) -> Dict[str, Any]: +def nada_dsl_to_nada_mir(outputs: List[Output]) -> proto_mir.ProgramMir: """Convert Nada DSL to Nada MIR.""" new_outputs = [] - PARTIES.clear() - INPUTS.clear() - LITERALS.clear() - operations: Dict[int, Dict] = {} + ctx = CompilationContext() + operations: Dict[int, proto_op.Operation] = SortedDict() # Process outputs for output in outputs: timer.start( f"nada_dsl.compiler_frontend.nada_dsl_to_nada_mir.{output.name}.process_operation" ) out_operation_id = output.child.child.id - extra_fns = traverse_and_process_operations( - out_operation_id, operations, FUNCTIONS - ) - FUNCTIONS.update(extra_fns) - + traverse_and_process_operations(out_operation_id, operations, ctx) timer.stop( f"nada_dsl.compiler_frontend.nada_dsl_to_nada_mir.{output.name}.process_operation" ) party = output.party - PARTIES[party.name] = party + ctx.parties[party.name] = party new_outputs.append( - { - "operation_id": out_operation_id, - "name": output.name, - "party": party.name, - "type": AST_OPERATIONS[out_operation_id].ty, - "source_ref_index": output.source_ref.to_index(), - } + proto_mir.Output( + operation_id=out_operation_id, + name=output.name, + party=party.name, + type=AST_OPERATIONS[out_operation_id].ty, + source_ref_index=output.source_ref.to_index(), + ) ) - # Now we go through all the discovered functions and see if they are - # invoking other functions, which we will need to process and add to the FUNCTIONS dictionary - - return { - "functions": to_mir_function_list(FUNCTIONS), - "parties": to_party_list(PARTIES), - 
"inputs": to_input_list(INPUTS), - "literals": to_literal_list(LITERALS), - "outputs": new_outputs, - "operations": operations, - "source_files": SourceRef.get_sources(), - "source_refs": SourceRef.get_refs(), - } - - -def to_party_list(parties) -> List[Dict]: + + operations = [proto_mir.OperationMapEntry(id=id, operation=op) for id, op in operations.items()] + + mir = proto_mir.ProgramMir( + functions=process_functions(ctx), + parties=to_party_list(ctx.parties), + inputs=to_input_list(ctx.inputs), + literals=to_literal_list(ctx.literals), + outputs=new_outputs, + operations=operations, + source_files=SourceRef.get_sources(), + source_refs=SourceRef.get_refs(), + ) + return mir + + +def to_party_list(parties: Dict[str, Party]) -> List[proto_mir.Party]: """Convert parties to a list in MIR format.""" return [ - { - "name": party.name, - "source_ref_index": party.source_ref.to_index(), - } + proto_mir.Party( + name=party.name, + source_ref_index=party.source_ref.to_index(), + ) for party in parties.values() ] -def to_input_list(inputs) -> List[Dict]: +def to_input_list(inputs: Dict[int, InputASTOperation]) -> List[proto_mir.Input]: """Convert inputs to a list in MIR format.""" input_list = [] - for party_inputs in inputs.values(): - for program_input, program_type in party_inputs.values(): + for input_ast in inputs.values(): input_list.append( - { - "name": program_input.name, - "type": program_type, - "party": program_input.party.name, - "doc": program_input.doc, - "source_ref_index": program_input.source_ref.to_index(), - } + proto_mir.Input( + name=input_ast.name, + type=input_ast.ty, + party=input_ast.party.name, + doc=input_ast.doc, + source_ref_index=input_ast.source_ref.to_index(), + ) ) return input_list -def to_literal_list(literals: Dict[str, Tuple[str, object]]) -> List[Dict]: +def to_literal_list( + literals: Dict[str, Tuple[str, proto_ty.NadaType]], +) -> List[proto_mir.Literal]: """Convert literals to a list in MIR format.""" literal_list = [] for name, (value, ty) in literals.items(): literal_list.append( - { - "name": name, - "value": str(value), - "type": ty, - } + proto_mir.Literal( + name=name, + value=value, + type=ty, + ) ) return literal_list -def to_mir_function_list(functions: Dict[int, NadaFunctionASTOperation]) -> List[Dict]: +def process_functions( + ctx: CompilationContext, +) -> List[proto_mir.NadaFunction]: """Convert functions to a list in MIR format. 
From a starting dictionary of functions, it traverses each one of them, @@ -181,36 +173,34 @@ def to_mir_function_list(functions: Dict[int, NadaFunctionASTOperation]) -> List A dictionary containing a starting list of functions """ mir_functions = [] - stack = list(functions.values()) + stack = list(ctx.functions.values()) + ctx.functions = {} while len(stack) > 0: function = stack.pop() - function_operations = {} + function_operations = SortedDict() - extra_functions = traverse_and_process_operations( + traverse_and_process_operations( function.child, function_operations, - functions, + ctx, ) - if extra_functions: - stack.extend(extra_functions.values()) - functions.update(extra_functions) + if len(ctx.functions) > 0: + stack.extend(ctx.functions.values()) + ctx.functions = {} + + function_operations = [proto_mir.OperationMapEntry(id=id, operation=op) for id, op in function_operations.items()] + mir_functions.append(function.to_mir(function_operations)) return mir_functions -def add_input_to_map(operation: InputASTOperation): +def add_input_to_map(operation: InputASTOperation, ctx: CompilationContext) -> proto_op.Operation: """Adds an input to the global INPUTS dictionary""" - party_name = operation.party.name - PARTIES[party_name] = operation.party - if party_name not in INPUTS: - INPUTS[party_name] = {} - if ( - operation.name in INPUTS[party_name] - and INPUTS[party_name][operation.name][0].id != operation.id - ): + ctx.parties[operation.party.name] = operation.party + if (operation.party.name, operation.name) in ctx.inputs and ctx.inputs[(operation.party.name, operation.name)].id != operation.id: raise CompilerException(f"Input is duplicated: {operation.name}") - INPUTS[party_name][operation.name] = (operation, operation.ty) + ctx.inputs[(operation.party.name, operation.name)] = operation return operation.to_mir() @@ -220,8 +210,8 @@ class CompilerException(Exception): def traverse_and_process_operations( operation_id: int, - operations: Dict[int, Dict], - functions: Dict[int, NadaFunctionASTOperation], + operations: Dict[int, proto_op.Operation], + ctx: CompilationContext, ) -> Dict[int, NadaFunctionASTOperation]: """Traverses the AST operations finding all the operation tree rooted at the given operation. Uses an iterative DFS algorithm. @@ -246,33 +236,20 @@ def traverse_and_process_operations( Dictionary with all the new functions being found while traversing the operation tree """ - extra_functions = {} stack = [operation_id] while len(stack) > 0: operation_id = stack.pop() if operation_id not in operations: operation = AST_OPERATIONS[operation_id] - wrapped_operation = process_operation(operation, functions) - operations[operation_id] = wrapped_operation.mir - if wrapped_operation.extra_function: - extra_functions[wrapped_operation.extra_function.id] = ( - wrapped_operation.extra_function - ) + maybe_op = process_operation(operation, ctx) + if maybe_op is not None: + operations[operation_id] = maybe_op stack.extend(operation.child_operations()) - return extra_functions - - -@dataclass -class ProcessOperationOutput: - """Output of the process_operation function""" - - mir: Dict[str, Dict] - extra_function: Optional[NadaFunctionASTOperation] def process_operation( - operation: ASTOperation, functions: Dict[int, NadaFunctionASTOperation] -) -> ProcessOperationOutput: + operation: ASTOperation, ctx: CompilationContext +) -> proto_op.Operation | None: """Process an AST operation. For arithmetic operations it simply returns the MIR representation of the operation. 
@@ -288,7 +265,6 @@ def process_operation( It ignores nada function arguments as they should not be present in the MIR. """ - processed_operation = None if isinstance( operation, ( @@ -304,29 +280,118 @@ def process_operation( ObjectAccessorASTOperation, ), ): - processed_operation = ProcessOperationOutput(operation.to_mir(), None) + return operation.to_mir() elif isinstance(operation, InputASTOperation): - add_input_to_map(operation) - processed_operation = ProcessOperationOutput(operation.to_mir(), None) + add_input_to_map(operation, ctx) + return operation.to_mir() elif isinstance(operation, LiteralASTOperation): - LITERALS[operation.literal_index] = (str(operation.value), operation.ty) - processed_operation = ProcessOperationOutput(operation.to_mir(), None) - elif isinstance( - operation, (MapASTOperation, ReduceASTOperation, NadaFunctionCallASTOperation) - ): - extra_fn = None - if operation.fn not in functions: - extra_fn = AST_OPERATIONS[operation.fn] - - processed_operation = ProcessOperationOutput(operation.to_mir(), extra_fn) # type: ignore + ctx.literals[operation.literal_index] = (str(operation.value), operation.ty) + return operation.to_mir() + elif isinstance(operation, (MapASTOperation, ReduceASTOperation)): + if operation.fn not in ctx.functions: + ctx.functions[operation.fn] = AST_OPERATIONS[operation.fn] + return operation.to_mir() elif isinstance(operation, NadaFunctionASTOperation): - extra_fn = None - if operation.id not in functions: - extra_fn = AST_OPERATIONS[operation.id] - processed_operation = ProcessOperationOutput({}, extra_fn) # type: ignore + if operation.id not in ctx.functions: + ctx.functions[operation.id] = AST_OPERATIONS[operation.id] + return None else: raise CompilerException( f"Compilation of Operation {operation} is not supported" ) - return processed_operation + + +def print_mir(mir: proto_mir.ProgramMir): + print("Parties:") + for party in mir.parties: + print(f" {party.name}") + print("Inputs:") + for input in mir.inputs: + print(f" {input.name} ty({type_to_str(input.type)}) party({input.party})") + print("Literals:") + for literal in mir.literals: + print(f" {literal.name} ty({type_to_str(literal.type)}) val({literal.value})") + print("Outputs:") + for output in mir.outputs: + print(f" {output.name} ty({type_to_str(output.type)}) oid({output.operation_id})") + print("Functions:") + for function in mir.functions: + args = ', '.join([f"{arg.name}: ty({type_to_str(arg.type)})" for arg in function.args]) + print(f" {function.name} fn_id({function.id}), args({args})") + print_operations(function.operations) + + print("Operations:") + print_operations(mir.operations) + +def print_operations(operation: List[proto_mir.OperationMapEntry]): + print() + for entry in operation: + op_id, op = entry.id, entry.operation + line = f"oid({op_id}) rty({type_to_str(op.type)}) = " + if hasattr(op, "binary"): + line += f"{op.binary.variant} oid({op.binary.left}) oid({op.binary.right})" + elif hasattr(op, "unary"): + line += f"{op.unary.variant} oid({op.unary.this})" + elif hasattr(op, "cast"): + line += f"cast oid({op.cast.target})" + elif hasattr(op, "ifelse"): + line += f"ifelse cond({op.ifelse.cond}) true({op.ifelse.first}) false({op.ifelse.second})" + elif hasattr(op, "random"): + line += f"random " + elif hasattr(op, "input_ref"): + line += f"input_ref to({op.input_ref.refers_to})" + elif hasattr(op, "arg_ref"): + line += f"arg_ref fn_id({op.arg_ref.function_id}) to({op.arg_ref.refers_to})" + elif hasattr(op, "literal_ref"): + line += f"literal_ref 
to({op.literal_ref.refers_to})" + elif hasattr(op, "map"): + line += f"map fn({(op.map.fn)}) oid({op.map.child})" + elif hasattr(op, "reduce"): + line += f"reduce fn({(op.reduce.fn)}) init({op.reduce.initial}) oid({op.reduce.child})" + elif hasattr(op, "new"): + oids = ", ".join([f"oid({oid})" for oid in op.new.elements]) + line += f"new {oids}" + elif hasattr(op, "array_accessor"): + line += f"array_accessor oid({op.array_accessor.source}) {op.array_accessor.index}" + elif hasattr(op, "tuple_accessor"): + line += f"tuple_accessor oid({op.tuple_accessor.source}) {op.tuple_accessor.index}" + elif hasattr(op, "ntuple_accessor"): + line += f"ntuple_accessor oid({op.ntuple_accessor.source}) {op.ntuple_accessor.index}" + elif hasattr(op, "object_accessor"): + line += f"object_accessor oid({op.object_accessor.source}) {op.object_accessor.key}" + elif hasattr(op, "cast"): + line += f"cast oid({op.cast.target}) {op.cast.to}" + else: + raise Exception(f"Unknown operation {op}") + print(line) + +def type_to_str(ty: proto_ty.NadaType): + if hasattr(ty, "integer"): + return "Integer" + elif hasattr(ty, "unsigned_integer"): + return "UnsignedInteger" + elif hasattr(ty, "boolean"): + return "Boolean" + elif hasattr(ty, "secret_integer"): + return "SecretInteger" + elif hasattr(ty, "secret_unsigned_integer"): + return "SecretUnsignedInteger" + elif hasattr(ty, "secret_boolean"): + return "SecretBoolean" + elif hasattr(ty, "ecdsa_private_key"): + return "EcdsaPrivateKey" + elif hasattr(ty, "ecdsa_digest_message"): + return "EcdsaDigestMessage" + elif hasattr(ty, "ecdsa_signature"): + return "EcdsaSignature" + elif hasattr(ty, "array"): + return f"Array[{type_to_str(ty.collection.contained_type)}:{ty.collection.array.size}]" + elif hasattr(ty, "tuple"): + return f"Tuple[{type_to_str(ty.tuple.left)}, {type_to_str(ty.tuple.right)}]" + elif hasattr(ty, "object"): + return f"Object" + elif hasattr(ty, "ntuple"): + return f"NTuple[{', '.join([type_to_str(t) for t in ty.ntuple.fields])}]" + else: + raise Exception("Unknown type {ty}") \ No newline at end of file diff --git a/nada_dsl/future/operations.py b/nada_dsl/future/operations.py index 040850b..6e0cbd2 100644 --- a/nada_dsl/future/operations.py +++ b/nada_dsl/future/operations.py @@ -2,8 +2,10 @@ from dataclasses import dataclass +from nada_mir_proto.nillion.nada.types import v1 as proto_ty + from nada_dsl import SourceRef -from nada_dsl.ast_util import AST_OPERATIONS, CastASTOperation, next_operation_id +from nada_dsl.ast_util import AST_OPERATIONS, CastASTOperation, OperationId from nada_dsl.nada_types import AllTypes, AllTypesType @@ -16,12 +18,12 @@ class Cast: source_ref: SourceRef def __init__(self, target: AllTypes, to: AllTypes, source_ref: SourceRef): - self.id = next_operation_id() + self.id = OperationId.next() self.target = target self.to = to self.source_ref = source_ref - def store_in_ast(self, ty: object): + def store_in_ast(self, ty: proto_ty.NadaType): """Store object in AST""" AST_OPERATIONS[self.id] = CastASTOperation( id=self.id, target=self.target, ty=ty, source_ref=self.source_ref diff --git a/nada_dsl/nada_types/collections.py b/nada_dsl/nada_types/collections.py index e8d4d4b..9b17b95 100644 --- a/nada_dsl/nada_types/collections.py +++ b/nada_dsl/nada_types/collections.py @@ -4,6 +4,8 @@ from typing import Any, Dict, Generic, List import typing +from sortedcontainers import SortedDict + from nada_dsl.ast_util import ( AST_OPERATIONS, BinaryASTOperation, @@ -27,7 +29,7 @@ ) from nada_dsl.nada_types.function import NadaFunction, 
create_nada_fn from nada_dsl.nada_types.generics import U, T, R -from . import AllTypes, AllTypesType, DslTypeRepr, OperationType +from . import AllTypes, OperationType def is_primitive_integer(nada_type_str: str): @@ -59,12 +61,12 @@ def __init__( fn: NadaFunction[T, R], source_ref: SourceRef, ): - self.id = next_operation_id() + self.id = OperationId.next() self.child = child self.fn = fn self.source_ref = source_ref - def store_in_ast(self, ty): + def store_in_ast(self, ty: proto_ty.NadaType): """Store MP in AST""" AST_OPERATIONS[self.id] = MapASTOperation( id=self.id, @@ -91,13 +93,13 @@ def __init__( initial: R, source_ref: SourceRef, ): - self.id = next_operation_id() + self.id = OperationId.next() self.child = child self.fn = fn self.initial = initial self.source_ref = source_ref - def store_in_ast(self, ty): + def store_in_ast(self, ty: proto_ty.NadaType): """Store a reduce object in AST""" AST_OPERATIONS[self.id] = ReduceASTOperation( id=self.id, @@ -114,21 +116,20 @@ class TupleType(NadaType): is_compound = True - def __init__(self, left_type: DslType, right_type: DslType): + def __init__(self, left_type: NadaType, right_type: NadaType): self.left_type = left_type self.right_type = right_type def instantiate(self, child_or_value): return Tuple(child_or_value, self.left_type, self.right_type) - def to_mir(self): + def to_mir(self) -> proto_ty.NadaType: """Convert a tuple object into a Nada type.""" - return { - "Tuple": { - "left_type": self.left_type.to_mir(), - "right_type": self.right_type.to_mir(), - } - } + return proto_ty.NadaType( + tuple=proto_ty.Tuple( + left=self.left_type.to_mir(), right=self.right_type.to_mir() + ) + ) def _generate_accessor(ty: Any, accessor: Any) -> DslType: @@ -141,17 +142,17 @@ def _generate_accessor(ty: Any, accessor: Any) -> DslType: class Tuple(Generic[T, U], DslType): """The Tuple type""" - left_type: T - right_type: U + left_type: NadaType + right_type: NadaType - def __init__(self, child, left_type: T, right_type: U): + def __init__(self, child, left_type: NadaType, right_type: NadaType): self.left_type = left_type self.right_type = right_type self.child = child super().__init__(self.child) @classmethod - def new(cls, left_value: DslType, right_value: DslType) -> "Tuple[T, U]": + def new(cls, left_value: T, right_value: U) -> "Tuple[T, U]": """Constructs a new Tuple.""" return Tuple( left_type=left_value.type(), @@ -208,12 +209,12 @@ def __init__( index: int, source_ref: SourceRef, ): - self.id = next_operation_id() + self.id = OperationId.next() self.child = child self.index = index self.source_ref = source_ref - def store_in_ast(self, ty: object): + def store_in_ast(self, ty: proto_ty.NadaType): """Store this accessor in the AST.""" AST_OPERATIONS[self.id] = TupleAccessorASTOperation( id=self.id, @@ -229,19 +230,17 @@ class NTupleType(NadaType): is_compound = True - def __init__(self, types: List[DslType]): + def __init__(self, types: List[NadaType]): self.types = types def instantiate(self, child_or_value): return NTuple(child_or_value, self.types) - def to_mir(self): + def to_mir(self) -> proto_ty.NadaType: """Convert a tuple object into a Nada type.""" - return { - "NTuple": { - "types": [ty.to_mir() for ty in self.types], - } - } + return proto_ty.NadaType( + ntuple=proto_ty.Ntuple(fields=[ty.to_mir() for ty in self.types]) + ) @dataclass @@ -298,12 +297,12 @@ def __init__( index: int, source_ref: SourceRef, ): - self.id = next_operation_id() + self.id = OperationId.next() self.child = child self.index = index self.source_ref = 
source_ref - def store_in_ast(self, ty: object): + def store_in_ast(self, ty: proto_ty.NadaType): """Store this accessor in the AST.""" AST_OPERATIONS[self.id] = NTupleAccessorASTOperation( id=self.id, @@ -319,14 +318,17 @@ class ObjectType(NadaType): is_compound = True - def __init__(self, types: Dict[str, DslType]): + def __init__(self, types: Dict[str, NadaType]): self.types = types - def to_mir(self): + def to_mir(self) -> proto_ty.NadaType: """Convert an object into a Nada type.""" - return { - "Object": {"types": {name: ty.to_mir() for name, ty in self.types.items()}} - } + fields = SortedDict({name: ty.to_mir() for name, ty in self.types.items()}) + return proto_ty.NadaType( + object=proto_ty.Object( + fields=[proto_ty.ObjectEntry(name=k, type=v) for k, v in fields.items()], + ) + ) def instantiate(self, child_or_value): return Object(child_or_value, self.types) @@ -388,12 +390,12 @@ def __init__( key: str, source_ref: SourceRef, ): - self.id = next_operation_id() + self.id = OperationId.next() self.child = child self.key = key self.source_ref = source_ref - def store_in_ast(self, ty: object): + def store_in_ast(self, ty: proto_ty.NadaType): """Store this accessor in the AST.""" AST_OPERATIONS[self.id] = ObjectAccessorASTOperation( id=self.id, @@ -408,16 +410,16 @@ class Zip: """The Zip operation.""" def __init__(self, left: AllTypes, right: AllTypes, source_ref: SourceRef): - self.id = next_operation_id() + self.id = OperationId.next() self.left = left self.right = right self.source_ref = source_ref - def store_in_ast(self, ty: DslTypeRepr): + def store_in_ast(self, ty: proto_ty.NadaType): """Store a Zip object in the AST.""" AST_OPERATIONS[self.id] = BinaryASTOperation( id=self.id, - name="Zip", + variant=proto_op.BinaryOperationVariant.ZIP, left=self.left.child.id, right=self.right.child.id, source_ref=self.source_ref, @@ -429,15 +431,15 @@ class Unzip: """The Unzip operation.""" def __init__(self, child: AllTypes, source_ref: SourceRef): - self.id = next_operation_id() + self.id = OperationId.next() self.child = child self.source_ref = source_ref - def store_in_ast(self, ty: DslTypeRepr): + def store_in_ast(self, ty: proto_ty.NadaType): """Store an Unzip object in the AST.""" AST_OPERATIONS[self.id] = UnaryASTOperation( id=self.id, - name="Unzip", + variant=proto_op.UnaryOperationVariant.UNZIP, child=self.child.child.id, source_ref=self.source_ref, ty=ty, @@ -448,16 +450,16 @@ class InnerProduct: """Inner product of two arrays.""" def __init__(self, left: AllTypes, right: AllTypes, source_ref: SourceRef): - self.id = next_operation_id() + self.id = OperationId.next() self.left = left self.right = right self.source_ref = source_ref - def store_in_ast(self, ty: DslTypeRepr): + def store_in_ast(self, ty: proto_ty.NadaType): """Store the InnerProduct object in the AST.""" AST_OPERATIONS[self.id] = BinaryASTOperation( id=self.id, - name="InnerProduct", + variant=proto_op.BinaryOperationVariant.INNER_PRODUCT, left=self.left.child.id, right=self.right.child.id, source_ref=self.source_ref, @@ -470,24 +472,18 @@ class ArrayType(NadaType): is_compound = True - def __init__(self, contained_type: AllTypesType, size: int): + def __init__(self, contained_type: NadaType, size: int): self.contained_type = contained_type self.size = size - def to_mir(self): + def to_mir(self) -> proto_ty.NadaType: """Convert this generic type into a MIR Nada type.""" - # TODO size is None when array used in function argument and used @nada_fn - # So you know the type but not the size, we should stop using 
@nada_fn decorator - # and apply the same logic when the function gets passed to .map() or .reduce() - # so we now the size of the array - if self.size is None: - raise NotImplementedError("ArrayType.to_mir") - return { - "Array": { - "inner_type": self.contained_type.to_mir(), - "size": self.size, - } - } + return proto_ty.NadaType( + array=proto_ty.Array( + size=self.size, + contained_type=self.contained_type.to_mir() + ) + ) def instantiate(self, child_or_value): return Array(child_or_value, self.size, self.contained_type) @@ -510,10 +506,10 @@ class Array(Generic[T], DslType): The size of the array """ - contained_type: T + contained_type: NadaType size: int - def __init__(self, child, size: int, contained_type: T = None): + def __init__(self, child, size: int, contained_type: NadaType = None): self.contained_type = ( contained_type if contained_type is not None else child.type() ) @@ -629,11 +625,11 @@ class TupleNew(Generic[T, U]): source_ref: SourceRef def __init__(self, child: typing.Tuple[T, U], source_ref: SourceRef): - self.id = next_operation_id() + self.id = OperationId.next() self.child = child self.source_ref = source_ref - def store_in_ast(self, ty: object): + def store_in_ast(self, ty: proto_ty.NadaType): """Store this TupleNew in the AST.""" AST_OPERATIONS[self.id] = NewASTOperation( id=self.id, @@ -655,11 +651,11 @@ class NTupleNew: source_ref: SourceRef def __init__(self, child: List[DslType], source_ref: SourceRef): - self.id = next_operation_id() + self.id = OperationId.next() self.child = child self.source_ref = source_ref - def store_in_ast(self, ty: object): + def store_in_ast(self, ty: proto_ty.NadaType): """Store this NTupleNew in the AST.""" AST_OPERATIONS[self.id] = NewASTOperation( id=self.id, @@ -681,11 +677,11 @@ class ObjectNew: source_ref: SourceRef def __init__(self, child: Dict[str, DslType], source_ref: SourceRef): - self.id = next_operation_id() + self.id = OperationId.next() self.child = child self.source_ref = source_ref - def store_in_ast(self, ty: object): + def store_in_ast(self, ty: proto_ty.NadaType): """Store this Object in the AST.""" AST_OPERATIONS[self.id] = NewASTOperation( id=self.id, @@ -720,11 +716,11 @@ class ArrayNew(Generic[T]): source_ref: SourceRef def __init__(self, child: List[T], source_ref: SourceRef): - self.id = next_operation_id() + self.id = OperationId.next() self.child = child self.source_ref = source_ref - def store_in_ast(self, ty: DslType): + def store_in_ast(self, ty: proto_ty.NadaType): """Store this ArrayNew object in the AST.""" AST_OPERATIONS[self.id] = NewASTOperation( id=self.id, diff --git a/nada_dsl/nada_types/function.py b/nada_dsl/nada_types/function.py index f9454a4..d8f160f 100644 --- a/nada_dsl/nada_types/function.py +++ b/nada_dsl/nada_types/function.py @@ -4,15 +4,16 @@ """ import inspect -from dataclasses import dataclass from typing import Generic, List, Callable + +from nada_mir_proto.nillion.nada.types import v1 as proto_ty + from nada_dsl import SourceRef from nada_dsl.ast_util import ( AST_OPERATIONS, NadaFunctionASTOperation, NadaFunctionArgASTOperation, - NadaFunctionCallASTOperation, - next_operation_id, + OperationId, ) from nada_dsl.nada_types.generics import T, R from nada_dsl.nada_types import DslType @@ -27,14 +28,14 @@ class NadaFunctionArg(Generic[T]): source_ref: SourceRef def __init__(self, function_id: int, name: str, arg_type: T, source_ref: SourceRef): - self.id = next_operation_id() + self.id = OperationId.next() self.function_id = function_id self.name = name self.type = arg_type 
self.source_ref = source_ref self.store_in_ast(arg_type.to_mir()) - def store_in_ast(self, ty): + def store_in_ast(self, ty: proto_ty.NadaType): """Store object in AST.""" AST_OPERATIONS[self.id] = NadaFunctionArgASTOperation( id=self.id, @@ -87,37 +88,6 @@ def store_in_ast(self): child=self.child.child.id, ) - def __call__(self, *args, **kwargs) -> R: - return self.return_type( - child=NadaFunctionCall(self, args, source_ref=SourceRef.back_frame()) - ) - - -@dataclass -class NadaFunctionCall(Generic[R]): - """Represents a call to a Nada Function.""" - - fn: NadaFunction - args: List[DslType] - source_ref: SourceRef - - def __init__(self, nada_function, args, source_ref): - self.id = next_operation_id() - self.args = args - self.fn = nada_function - self.source_ref = source_ref - self.store_in_ast(nada_function.return_type.type().to_mir()) - - def store_in_ast(self, ty): - """Store this function call in the AST.""" - AST_OPERATIONS[self.id] = NadaFunctionCallASTOperation( - id=self.id, - args=[arg.child.id for arg in self.args], - fn=self.fn.id, - source_ref=self.source_ref, - ty=ty, - ) - def create_nada_fn(fn, args_ty) -> NadaFunction[T, R]: """ @@ -132,7 +102,7 @@ def create_nada_fn(fn, args_ty) -> NadaFunction[T, R]: args = inspect.getfullargspec(fn) nada_args = [] - function_id = next_operation_id() + function_id = OperationId.next() nada_args_type_wrapped = [] for arg, arg_ty in zip(args.args, args_ty): # We'll get the function source ref for now diff --git a/nada_dsl/nada_types/scalar_types.py b/nada_dsl/nada_types/scalar_types.py index 7980150..67ed015 100644 --- a/nada_dsl/nada_types/scalar_types.py +++ b/nada_dsl/nada_types/scalar_types.py @@ -5,11 +5,16 @@ from dataclasses import dataclass from typing import Union, TypeVar from typing_extensions import Self +from betterproto.lib.google.protobuf import Empty + +from nada_mir_proto.nillion.nada.types import v1 as proto_ty + from nada_dsl.operations import * from nada_dsl.program_io import Literal from nada_dsl import SourceRef from . import DslType, Mode, BaseType, OperationType + # Constant dictionary that stores all the Nada types and is use to # convert from the (mode, base_type) representation to the concrete Nada type # (Integer, SecretBoolean,...) @@ -363,7 +368,7 @@ def instantiate(self, child_or_value): """Creates a value corresponding to this meta type""" @abstractmethod - def to_mir(self): + def to_mir(self) -> proto_ty.NadaType: """Returns a MIR representation of this meta type""" @@ -374,15 +379,8 @@ def instantiate(self, child_or_value): """Creates a value corresponding to this meta type""" return self.ty(child_or_value) - def to_mir(self): - name = self.ty.__name__ - # Rename public variables so they are considered as the same as literals. 
- if name.startswith("Public"): - name = name[len("Public") :].lstrip() - - if name.endswith("Type"): - name = name[: -len("Type")].rstrip() - return name + def to_mir(self) -> proto_ty.NadaType: + return proto_ty.NadaType(**{self.proto_ty: Empty()}) @register_scalar_type(Mode.CONSTANT, BaseType.INTEGER) @@ -417,6 +415,7 @@ class IntegerType(TypePassthroughMixin): ty = Integer is_constant = True is_scalar = True + proto_ty = "integer" @dataclass @@ -454,6 +453,7 @@ class UnsignedIntegerType(TypePassthroughMixin): ty = UnsignedInteger is_constant = True is_scalar = True + proto_ty = "unsigned_integer" @register_scalar_type(Mode.CONSTANT, BaseType.BOOLEAN) @@ -496,6 +496,7 @@ class BooleanType(TypePassthroughMixin): ty = Boolean is_constant = True is_scalar = True + proto_ty = "boolean" @register_scalar_type(Mode.PUBLIC, BaseType.INTEGER) @@ -526,6 +527,7 @@ class PublicIntegerType(TypePassthroughMixin): ty = PublicInteger is_scalar = True + proto_ty = "integer" @register_scalar_type(Mode.PUBLIC, BaseType.UNSIGNED_INTEGER) @@ -556,6 +558,7 @@ class PublicUnsignedIntegerType(TypePassthroughMixin): ty = PublicUnsignedInteger is_scalar = True + proto_ty = "unsigned_integer" @dataclass @@ -591,6 +594,7 @@ class PublicBooleanType(TypePassthroughMixin): ty = PublicBoolean is_scalar = True + proto_ty = "boolean" @dataclass @@ -646,6 +650,7 @@ class SecretIntegerType(TypePassthroughMixin): ty = SecretInteger is_scalar = True + proto_ty = "secret_integer" @dataclass @@ -703,6 +708,7 @@ class SecretUnsignedIntegerType(TypePassthroughMixin): ty = SecretUnsignedInteger is_scalar = True + proto_ty = "secret_unsigned_integer" @dataclass @@ -739,6 +745,7 @@ class SecretBooleanType(TypePassthroughMixin): ty = SecretBoolean is_scalar = True + proto_ty = "secret_boolean" @dataclass @@ -756,6 +763,7 @@ class EcdsaSignatureType(TypePassthroughMixin): """Meta type for EcdsaSignatures""" ty = EcdsaSignature + proto_ty = "ecdsa_signature" @dataclass @@ -773,6 +781,7 @@ class EcdsaDigestMessageType(TypePassthroughMixin): """Meta type for EcdsaDigestMessages""" ty = EcdsaDigestMessage + proto_ty = "ecdsa_digest_message" @dataclass @@ -796,3 +805,4 @@ class EcdsaPrivateKeyType(TypePassthroughMixin): """Meta type for EcdsaPrivateKeys""" ty = EcdsaPrivateKey + proto_ty = "ecdsa_private_key" diff --git a/nada_dsl/operations.py b/nada_dsl/operations.py index bee0fab..3ac904b 100644 --- a/nada_dsl/operations.py +++ b/nada_dsl/operations.py @@ -3,6 +3,9 @@ """ from dataclasses import dataclass +from nada_mir_proto.nillion.nada.types import v1 as proto_ty +from nada_mir_proto.nillion.nada.operations import v1 as proto_op + from nada_dsl import SourceRef from nada_dsl.ast_util import ( AST_OPERATIONS, @@ -10,7 +13,7 @@ IfElseASTOperation, RandomASTOperation, UnaryASTOperation, - next_operation_id, + OperationId, ) from nada_dsl.nada_types import AllTypes @@ -18,17 +21,19 @@ class BinaryOperation: """Superclass of all the binary operations.""" + variant: proto_op.BinaryOperationVariant + def __init__(self, left: AllTypes, right: AllTypes, source_ref: SourceRef): - self.id = next_operation_id() + self.id = OperationId.next() self.left = left self.right = right self.source_ref = source_ref - def store_in_ast(self, ty: object): + def store_in_ast(self, ty: proto_ty.NadaType): """Store object in AST""" AST_OPERATIONS[self.id] = BinaryASTOperation( id=self.id, - name=self.__class__.__name__, + variant=self.variant, left=self.left.child.id, right=self.right.child.id, source_ref=self.source_ref, @@ -39,16 +44,18 @@ def 
store_in_ast(self, ty: object): class UnaryOperation: """Superclass of all the unary operations.""" + variant: proto_op.UnaryOperationVariant + def __init__(self, child: AllTypes, source_ref: SourceRef): - self.id = next_operation_id() + self.id = OperationId.next() self.child = child self.source_ref = source_ref - def store_in_ast(self, ty: object): + def store_in_ast(self, ty: proto_ty.NadaType): """Store object in AST.""" AST_OPERATIONS[self.id] = UnaryASTOperation( id=self.id, - name=self.__class__.__name__, + variant=self.variant, child=self.child.child.id, source_ref=self.source_ref, ty=ty, @@ -58,74 +65,110 @@ def store_in_ast(self, ty: object): class Addition(BinaryOperation): """Addition operation""" + variant = proto_op.BinaryOperationVariant.ADDITION + class Subtraction(BinaryOperation): """Subtraction operation.""" + variant = proto_op.BinaryOperationVariant.SUBTRACTION + class Multiplication(BinaryOperation): """Multiplication operation""" + variant = proto_op.BinaryOperationVariant.MULTIPLICATION + class Division(BinaryOperation): """Division operation""" + variant = proto_op.BinaryOperationVariant.DIVISION + class Modulo(BinaryOperation): """Modulo operation""" + variant = proto_op.BinaryOperationVariant.MODULO + class Power(BinaryOperation): """Power operation""" + variant = proto_op.BinaryOperationVariant.POWER + class RightShift(BinaryOperation): """Right shift (>>) operation.""" + variant = proto_op.BinaryOperationVariant.RIGHT_SHIFT + class LeftShift(BinaryOperation): """Left shift (<<)operation.""" + variant = proto_op.BinaryOperationVariant.LEFT_SHIFT + class LessThan(BinaryOperation): """Less than (<) operation""" + variant = proto_op.BinaryOperationVariant.LESS_THAN + class GreaterThan(BinaryOperation): """Greater than (>) operation.""" + variant = proto_op.BinaryOperationVariant.GREATER_THAN + class LessOrEqualThan(BinaryOperation): """Less or equal (<=) operation.""" + variant = proto_op.BinaryOperationVariant.LESS_EQ + class GreaterOrEqualThan(BinaryOperation): """Greater or equal (>=) operation.""" + variant = proto_op.BinaryOperationVariant.GREATER_EQ + class Equals(BinaryOperation): """Equals (==) operation""" + variant = proto_op.BinaryOperationVariant.EQUALS + class NotEquals(BinaryOperation): """Not equals (!=) operation.""" + variant = proto_op.BinaryOperationVariant.NOT_EQUALS + class PublicOutputEquality(BinaryOperation): """Public output equality operation.""" + variant = proto_op.BinaryOperationVariant.EQUALS_PUBLIC_OUTPUT + class BooleanAnd(BinaryOperation): """Boolean AND (&) operation.""" + variant = proto_op.BinaryOperationVariant.BOOL_AND + class BooleanOr(BinaryOperation): """Boolean OR (|) operation.""" + variant = proto_op.BinaryOperationVariant.BOOL_OR + class BooleanXor(BinaryOperation): """Boolean XOR (^) operation.""" + variant = proto_op.BinaryOperationVariant.BOOL_XOR + class Random: """Random operation.""" @@ -133,10 +176,10 @@ class Random: source_ref: SourceRef def __init__(self, source_ref): - self.id = next_operation_id() + self.id = OperationId.next() self.source_ref = source_ref - def store_in_ast(self, ty): + def store_in_ast(self, ty: proto_ty.NadaType): """Store object in AST.""" AST_OPERATIONS[self.id] = RandomASTOperation( id=self.id, ty=ty, source_ref=self.source_ref @@ -157,13 +200,13 @@ class IfElse: def __init__( self, this: AllTypes, arg_0: AllTypes, arg_1: AllTypes, source_ref: SourceRef ): - self.id = next_operation_id() + self.id = OperationId.next() self.this = this self.arg_0 = arg_0 self.arg_1 = arg_1 
self.source_ref = source_ref - def store_in_ast(self, ty): + def store_in_ast(self, ty: proto_ty.NadaType): """Store object in AST.""" AST_OPERATIONS[self.id] = IfElseASTOperation( id=self.id, @@ -178,6 +221,8 @@ def store_in_ast(self, ty): class Reveal(UnaryOperation): """Reveal (i.e. make public) operation.""" + variant = proto_op.UnaryOperationVariant.REVEAL + def __init__(self, this: AllTypes, source_ref: SourceRef): super().__init__(child=this, source_ref=source_ref) @@ -185,13 +230,19 @@ def __init__(self, this: AllTypes, source_ref: SourceRef): class TruncPr(BinaryOperation): """Probabilistic Truncation operation.""" + variant = proto_op.BinaryOperationVariant.TRUNC_PR + class Not(UnaryOperation): """Not (!) Operation""" + variant = proto_op.UnaryOperationVariant.NOT + def __init__(self, this: AllTypes, source_ref: SourceRef): super().__init__(child=this, source_ref=source_ref) class EcdsaSign(BinaryOperation): """Ecdsa signing operation.""" + + variant = proto_op.BinaryOperationVariant.ECDSA_SIGN diff --git a/nada_dsl/program_io.py b/nada_dsl/program_io.py index 9848ab6..0486887 100644 --- a/nada_dsl/program_io.py +++ b/nada_dsl/program_io.py @@ -6,12 +6,13 @@ from dataclasses import dataclass from typing import Any +from nada_mir_proto.nillion.nada.types import v1 as proto_ty from nada_dsl.ast_util import ( AST_OPERATIONS, InputASTOperation, LiteralASTOperation, - next_operation_id, + OperationId, ) from nada_dsl.errors import InvalidTypeError from nada_dsl.nada_types import AllTypes, Party @@ -35,7 +36,7 @@ class Input(DslType): source_ref: SourceRef def __init__(self, name, party, doc=""): - self.id = next_operation_id() + self.id = OperationId.next() self.name = name self.party = party self.doc = doc @@ -43,7 +44,7 @@ def __init__(self, name, party, doc=""): self.source_ref = SourceRef.back_frame() super().__init__(self.child) - def store_in_ast(self, ty: object): + def store_in_ast(self, ty: proto_ty.NadaType): """Store object in AST""" AST_OPERATIONS[self.id] = InputASTOperation( id=self.id, @@ -68,13 +69,13 @@ class Literal(DslType): source_ref: SourceRef def __init__(self, value, source_ref): - self.id = next_operation_id() + self.id = OperationId.next() self.value = value self.source_ref = source_ref self.child = None super().__init__(self.child) - def store_in_ast(self, ty: object): + def store_in_ast(self, ty: proto_ty.NadaType): """Store object in AST""" AST_OPERATIONS[self.id] = LiteralASTOperation( operation_id=self.id, diff --git a/nada_dsl/source_ref.py b/nada_dsl/source_ref.py index b2b4755..77c3c4f 100644 --- a/nada_dsl/source_ref.py +++ b/nada_dsl/source_ref.py @@ -8,6 +8,7 @@ from dataclasses import dataclass from typing import Tuple import inspect +from nada_mir_proto.nillion.nada.mir import v1 as proto_mir USED_SOURCES = {} REFS = [] @@ -79,7 +80,7 @@ def to_index(self) -> int: as well as an entry in an array, and returns an index to it""" global next_index key = self.to_key() - value = self.to_value() + value = self.to_mir() if key in index_map: return index_map[key] @@ -88,14 +89,14 @@ def to_index(self) -> int: next_index += 1 return index_map[key] - def to_value(self): - """Convert the SourceRef object to a dictionary.""" - return { - "lineno": self.lineno, - "offset": self.offset, - "file": self.file, - "length": self.length, - } + def to_mir(self): + """Convert the SourceRef object to MIR""" + return proto_mir.SourceRef( + lineno=self.lineno, + offset=self.offset, + file=self.file, + length=self.length, + ) def to_key(self): """Convert the current object 
into the key representation used by 'index_map'""" diff --git a/nada_mir/MANIFEST.in b/nada_mir/MANIFEST.in new file mode 100644 index 0000000..121482f --- /dev/null +++ b/nada_mir/MANIFEST.in @@ -0,0 +1,8 @@ +graft src/ +include README.md + +# Explicitly exclude some files +global-exclude *.pyc +global-exclude __pycache__ +global-exclude .git* +global-exclude .DS_Store diff --git a/nada_mir/proto/nillion/nada/v1/mir.proto b/nada_mir/proto/nillion/nada/v1/mir.proto new file mode 100644 index 0000000..95ec847 --- /dev/null +++ b/nada_mir/proto/nillion/nada/v1/mir.proto @@ -0,0 +1,114 @@ +syntax = "proto3"; + +package nillion.nada.mir.v1; + +import "nillion/nada/v1/operations.proto"; +import "nillion/nada/v1/types.proto"; + +message OperationMapEntry { + // id + uint64 id = 1; + // Value + nillion.nada.operations.v1.Operation operation = 2; +} + +message NadaFunctionArg { + // Argument name + string name = 1; + // Argument type + nillion.nada.types.v1.NadaType type = 2; + // Source code info about this element. + uint64 source_ref_index = 3; +} + +message NadaFunction { + // Function identifier + uint64 id = 1; + // Function arguments + repeated NadaFunctionArg args = 2; + // The name of the function + string name = 3; + // Table of operations, we use repeated OperationMapEntry ordered by id to have deterministic compilation + repeated OperationMapEntry operations = 4; + // Identifier of the operation (in the operations map) that represents + // the return of this function + uint64 return_operation_id = 5; + // Function return type + nillion.nada.types.v1.NadaType return_type = 6; + // NadaFunction source file information. + uint64 source_ref_index = 7; +} + +message Party { + // Name of the party + string name = 1; + // Source reference + uint64 source_ref_index = 2; +} + +message Input { + // Operation type + nillion.nada.types.v1.NadaType type = 1; + // Party that contains this input + string party = 2; + // Input name + string name = 3; + // The documentation. + string doc = 4; + // Source file info related with this operation. + uint64 source_ref_index = 5; +} + +message Literal { + // Name + string name = 2; + // Value + string value = 3; + // Type + nillion.nada.types.v1.NadaType type = 4; +} + +message SourceRef { + // Nada-lang file that contains the elements + string file = 1; + // Line number into the file that contains the element + uint32 lineno = 2; + // Element's offset into the file + uint32 offset = 3; + // Element's length into the file + uint32 length = 4; +} + +message Output { + // Output name + string name = 1; + // Output operation ID + uint64 operation_id = 2; + // Party contains this output + string party = 3; + // Output type + nillion.nada.types.v1.NadaType type = 4; + // Source file info related with this output. + uint64 source_ref_index = 5; +} + +// The Program MIR. +// This is the container of all the MIR representation for a program. +message ProgramMIR { + // List of the functions are used by the program + repeated NadaFunction functions = 1; + // Program parties + repeated Party parties = 2; + // Program inputs + repeated Input inputs = 3; + // Program literals + repeated Literal literals = 4; + // Program output + repeated Output outputs = 5; + // Table of operations, we use repeated OperationMapEntry ordered by id to have deterministic compilation + repeated OperationMapEntry operations = 6; + // Source file info related with the program. 
+ map<string, string> source_files = 7; + // Array of source references + repeated SourceRef source_refs = 8; +} \ No newline at end of file diff --git a/nada_mir/proto/nillion/nada/v1/operations.proto b/nada_mir/proto/nillion/nada/v1/operations.proto new file mode 100644 index 0000000..8cc4044 --- /dev/null +++ b/nada_mir/proto/nillion/nada/v1/operations.proto @@ -0,0 +1,215 @@ +syntax = "proto3"; + +package nillion.nada.operations.v1; + +import "nillion/nada/v1/types.proto"; +import "google/protobuf/empty.proto"; + +// The variant of the binary operation. +enum BinaryOperationVariant { + // Addition operation variant + ADDITION = 0; + // Subtraction operation variant + SUBTRACTION = 1; + // Multiplication operation variant + MULTIPLICATION = 2; + // Less-than comparison operation variant + LESS_THAN = 3; + // Less-or-equal-than comparison operation variant + LESS_EQ = 4; + // Greater-than comparison operation variant + GREATER_THAN = 5; + // Greater-or-equal-than comparison operation variant + GREATER_EQ = 6; + // Equals public output comparison operation variant + EQUALS_PUBLIC_OUTPUT = 7; + // Equals comparison operation variant also public-public + // comparisons + EQUALS = 8; + // Modulo operation variant + MODULO = 9; + // Power operation variant + POWER = 10; + // Division operation variant + DIVISION = 11; + // Left Shift operation variant + LEFT_SHIFT = 12; + // Right Shift operation variant + RIGHT_SHIFT = 13; + // Probabilistic truncation operation variant + TRUNC_PR = 14; + // Not equals operation + NOT_EQUALS = 15; + // Boolean AND operation variant + BOOL_AND = 16; + // Boolean OR operation variant + BOOL_OR = 17; + // Boolean XOR operation variant + BOOL_XOR = 18; + // Zip operation variant + ZIP = 19; + // Inner product operation variant + INNER_PRODUCT = 20; + // ECDSA sign operation variant + ECDSA_SIGN = 21; +} + +// The variant of the unary operation. +enum UnaryOperationVariant { + // Unzip operation variant + UNZIP = 0; + // Reveal operation variant + REVEAL = 1; + // Not operation variant + NOT = 2; +} + +// MIR Binary operation. +// Basically most arithmetic operations: Addition, Subtraction +// Division, Modulo, Power, etc. +message BinaryOperation { + // Operation variant + BinaryOperationVariant variant = 1; + // Left operand of the operation + uint64 left = 2; + // Right operand of the operation + uint64 right = 3; +} + +// Represents a MIR Unary operation: +// - Cast +// - Not +// - Reveal +// - Unzip +message UnaryOperation { + // Operation variant + UnaryOperationVariant variant = 1; + // The operand of the operation + uint64 this = 2; +} + +message IfElseOperation { + // operand of the conditional operation + uint64 cond = 1; + // operand of the first operation + uint64 first = 2; + // operand of the second operation + uint64 second = 3; +} + +// Input reference structure, can be used for: +// - Input +// - Literal +// Also, it is used to describe the nada function arguments. 
+message InputReference { + // Name of the input/literal operation referred to by this operation + string refers_to = 1; +} + + +message LiteralReference { + // Name of the literal referred to by this operation + string refers_to = 1; +} + +message MapOperation { + // Function to execute + uint64 fn = 1; + // Map operation child + uint64 child = 2; +} + +message ReduceOperation { + // Function to execute + uint64 fn = 1; + // Reduce operation child + uint64 child = 2; + // Initial accumulator value + uint64 initial = 3; +} + +message NewOperation { + // The elements of this compound type + repeated uint64 elements = 1; +} + +message ArrayAccessor { + // array index - for now an integer but eventually it could be the result of + // an operation + uint32 index = 1; + // source - The Operation that represents the array we are accessing + uint64 source = 2; +} + +enum TupleIndex { + // The left element of the tuple + LEFT = 0; + // The right element of the tuple + RIGHT = 1; +} + +message TupleAccessor { + // tuple index (left or right) + TupleIndex index = 1; + // source - The Operation that represents the tuple we are accessing + uint64 source = 2; +} + +message NtupleAccessor { + // tuple index + uint32 index = 1; + // source - The Operation that represents the tuple we are accessing + uint64 source = 2; +} + +message ObjectAccessor { + // object key + string key = 1; + // source - The Operation that represents the object we are accessing + uint64 source = 2; +} + +message NadaFunctionArgRef { + // Function owner of this argument + uint64 function_id = 1; + // Refers to the argument + string refers_to = 2; +} + +message CastOperation { + // The operand of the operation + uint64 target = 1; + // The type to cast to + nillion.nada.types.v1.NadaType cast_to = 2; +} + +// The Operation. +// An operation is identified by: +// - The operation variant +message Operation { + // Operation identifier + uint64 id = 1; + // The output type of the operation + nillion.nada.types.v1.NadaType type = 2; + // Source file info related with this operation. 
+ uint64 source_ref_index = 3; + + oneof operation { + BinaryOperation binary = 4; + UnaryOperation unary = 5; + IfElseOperation ifelse = 6; + google.protobuf.Empty random = 7; + InputReference input_ref = 8; + LiteralReference literal_ref = 9; + NadaFunctionArgRef arg_ref = 10; + MapOperation map = 11; + ReduceOperation reduce = 12; + NewOperation new = 13; + ArrayAccessor array_accessor = 14; + TupleAccessor tuple_accessor = 15; + NtupleAccessor ntuple_accessor = 16; + ObjectAccessor object_accessor = 17; + CastOperation cast = 18; + } + +} diff --git a/nada_mir/proto/nillion/nada/v1/types.proto b/nada_mir/proto/nillion/nada/v1/types.proto new file mode 100644 index 0000000..f0ab75a --- /dev/null +++ b/nada_mir/proto/nillion/nada/v1/types.proto @@ -0,0 +1,56 @@ +syntax = "proto3"; + +import "google/protobuf/empty.proto"; + +package nillion.nada.types.v1; + +// Array type, defines a collection of homogeneous values +message Array { + // Type of the elements of this array + NadaType contained_type = 1; + // Size of the array + uint32 size = 2; +} + +// Tuple type, a binary tuple +message Tuple { + // Left type + NadaType left = 1; + // Right type + NadaType right = 2; +} + +message Ntuple { + // Number of elements in the tuple + repeated NadaType fields = 1; +} + +message ObjectEntry { + // Name of the field + string name = 1; + // Type of the field + NadaType type = 2; +} + +message Object { + // Fields of the object, we use repeated ObjectEntry ordered by name to have deterministic compilation + repeated ObjectEntry fields = 1; +} + +message NadaType { + oneof nada_type { + google.protobuf.Empty integer = 1; + google.protobuf.Empty unsigned_integer = 2; + google.protobuf.Empty boolean = 3; + google.protobuf.Empty secret_integer = 4; + google.protobuf.Empty secret_unsigned_integer = 5; + google.protobuf.Empty secret_boolean = 6; + google.protobuf.Empty ecdsa_private_key = 7; + google.protobuf.Empty ecdsa_digest_message = 8; + google.protobuf.Empty ecdsa_signature = 9; + Array array = 10; + Tuple tuple = 11; + Ntuple ntuple = 12; + Object object = 13; + } +} \ No newline at end of file diff --git a/nada_mir/pyproject.toml b/nada_mir/pyproject.toml index 6a6102f..8c4f171 100644 --- a/nada_mir/pyproject.toml +++ b/nada_mir/pyproject.toml @@ -15,7 +15,16 @@ classifiers = [ "Operating System :: OS Independent", ] -dependencies = ["grpcio-tools==1.62.3", "betterproto==2.0.0b7"] +dependencies = [ + "betterproto==2.0.0b7", + "pydantic==2.10.3" +] + +[tool.uv] +dev-dependencies = [ + "grpcio-tools==1.62.3" +] [project.optional-dependencies] dev = ["betterproto[compiler]==2.0.0b7"] +protoc = ["grpcio-tools==1.62.3"] diff --git a/nada_mir/scripts/gen_proto.sh b/nada_mir/scripts/gen_proto.sh index d07ec74..e8a98b3 100755 --- a/nada_mir/scripts/gen_proto.sh +++ b/nada_mir/scripts/gen_proto.sh @@ -6,9 +6,7 @@ set -e SCRIPT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}" 2>/dev/null)" && pwd -P)" -# This is the path where nada-mir-model has been checked out in your workstation -NADA_MIR_MODEL_ROOT=${SCRIPT_PATH}/../../../nada-mir-model -GRPC_ROOT=${NADA_MIR_MODEL_ROOT}/proto +PROTO_ROOT=${SCRIPT_PATH}/../proto mir_files=( "nillion/nada/v1/mir.proto" @@ -24,5 +22,8 @@ mkdir -p ${OUTPUT_DIR} cd ${SCRIPT_PATH}/../src for file in "${mir_files[@]}"; do - python -m grpc_tools.protoc -I${GRPC_ROOT} --python_betterproto_out=nada_mir_proto $file + python -m grpc_tools.protoc \ + -I${PROTO_ROOT} \ + --python_betterproto_out=nada_mir_proto \ + $file done diff --git 
a/nada_mir/src/nada_mir_proto/nillion/nada/mir/v1/__init__.py b/nada_mir/src/nada_mir_proto/nillion/nada/mir/v1/__init__.py index c9f75c6..fa3c9b1 100644 --- a/nada_mir/src/nada_mir_proto/nillion/nada/mir/v1/__init__.py +++ b/nada_mir/src/nada_mir_proto/nillion/nada/mir/v1/__init__.py @@ -15,6 +15,15 @@ from ...types import v1 as __types_v1__ +@dataclass(eq=False, repr=False) +class OperationMapEntry(betterproto.Message): + id: int = betterproto.uint64_field(1) + """id""" + + operation: "__operations_v1__.Operation" = betterproto.message_field(2) + """Value""" + + @dataclass(eq=False, repr=False) class NadaFunctionArg(betterproto.Message): name: str = betterproto.string_field(1) @@ -38,8 +47,10 @@ class NadaFunction(betterproto.Message): name: str = betterproto.string_field(3) """The name of the function""" - operations: List["__operations_v1__.Operation"] = betterproto.message_field(4) - """Table of operations for the function""" + operations: List["OperationMapEntry"] = betterproto.message_field(4) + """ + Table of operations, we use repeated OperationMapEntry ordered by id to have deterministic compilation + """ return_operation_id: int = betterproto.uint64_field(5) """ @@ -56,42 +67,33 @@ class NadaFunction(betterproto.Message): @dataclass(eq=False, repr=False) class Party(betterproto.Message): - id: int = betterproto.uint64_field(1) - """Party index""" - - name: str = betterproto.string_field(2) + name: str = betterproto.string_field(1) """Name of the party""" - source_ref_index: int = betterproto.uint64_field(3) + source_ref_index: int = betterproto.uint64_field(2) """Source reference""" @dataclass(eq=False, repr=False) class Input(betterproto.Message): - id: int = betterproto.uint64_field(1) - """Input index""" - - type: "__types_v1__.NadaType" = betterproto.message_field(2) + type: "__types_v1__.NadaType" = betterproto.message_field(1) """Operation type""" - party_id: int = betterproto.uint64_field(3) + party: str = betterproto.string_field(2) """Party that contains this input""" - name: str = betterproto.string_field(4) + name: str = betterproto.string_field(3) """Input name""" - doc: str = betterproto.string_field(5) + doc: str = betterproto.string_field(4) """The documentation.""" - source_ref_index: int = betterproto.uint64_field(6) + source_ref_index: int = betterproto.uint64_field(5) """Source file info related with this operation.""" @dataclass(eq=False, repr=False) class Literal(betterproto.Message): - id: int = betterproto.uint64_field(1) - """Literal index""" - name: str = betterproto.string_field(2) """Name""" @@ -122,8 +124,8 @@ class Output(betterproto.Message): name: str = betterproto.string_field(1) """Output name""" - operation_id: int = betterproto.int64_field(2) - """Output inner operation""" + operation_id: int = betterproto.uint64_field(2) + """Output operation ID""" party: str = betterproto.string_field(3) """Party contains this output""" @@ -131,7 +133,7 @@ class Output(betterproto.Message): type: "__types_v1__.NadaType" = betterproto.message_field(4) """Output type""" - source_ref_index: int = betterproto.int64_field(5) + source_ref_index: int = betterproto.uint64_field(5) """Source file info related with this output.""" @@ -157,8 +159,10 @@ class ProgramMir(betterproto.Message): outputs: List["Output"] = betterproto.message_field(5) """Program output""" - operations: List["__operations_v1__.Operation"] = betterproto.message_field(6) - """Table of operations""" + operations: List["OperationMapEntry"] = betterproto.message_field(6) + """ + Table of operations, 
we use repeated OperationMapEntry ordered by id to have deterministic compilation + """ source_files: Dict[str, str] = betterproto.map_field( 7, betterproto.TYPE_STRING, betterproto.TYPE_STRING diff --git a/nada_mir/src/nada_mir_proto/nillion/nada/operations/v1/__init__.py b/nada_mir/src/nada_mir_proto/nillion/nada/operations/v1/__init__.py index 3d71f39..96a5a8e 100644 --- a/nada_mir/src/nada_mir_proto/nillion/nada/operations/v1/__init__.py +++ b/nada_mir/src/nada_mir_proto/nillion/nada/operations/v1/__init__.py @@ -7,126 +7,95 @@ from typing import List import betterproto +import betterproto.lib.google.protobuf as betterproto_lib_google_protobuf from ...types import v1 as __types_v1__ -class OperationVariant(betterproto.Enum): - """ - The variant of the operation. - This enumeration effectively lists all the different operations supported - """ - - REDUCE = 0 - """Reduce operation variant""" - - MAP = 1 - """Map operation variant""" - - UNZIP = 2 - """Unzip operation variant""" +class BinaryOperationVariant(betterproto.Enum): + """The variant of the binary operation.""" - ZIP = 3 - """Zip operation variant""" - - ADDITION = 4 + ADDITION = 0 """Addition operation variant""" - SUBTRACTION = 5 + SUBTRACTION = 1 """Addition operation variant""" - MULTIPLICATION = 6 + MULTIPLICATION = 2 """Multiplication operation variant""" - LESS_THAN = 7 + LESS_THAN = 3 """Less-than comparison operation variant""" - LESS_EQ = 8 + LESS_EQ = 4 """Less-or-equal-than comparison operation variant""" - GREATER_THAN = 9 + GREATER_THAN = 5 """Greater-than comparison operation variant""" - GREATER_EQ = 10 + GREATER_EQ = 6 """Greater-or-equal-than comparison operation variant""" - EQUALS_PUBLIC_OUTPUT = 11 + EQUALS_PUBLIC_OUTPUT = 7 """Equals public output comparison operation variant""" - EQUALS = 12 + EQUALS = 8 """ Equals comparison operation variant also public-public comparisons """ - CAST = 13 - """Cast operation variant""" - - INPUT_REF = 14 - """InputReference operation variant""" - - LITERAL_REF = 15 - """LiteralReference operation variant""" - - NADA_FN_ARG_REF = 16 - """Nada function argument variant""" - - MODULO = 17 + MODULO = 9 """Modulo operation variant""" - POWER = 18 + POWER = 10 """Power operation variant""" - DIVISION = 19 + DIVISION = 11 """Division operation variant""" - NADA_FN_CALL = 20 - """Nada function call variant""" - - ARRAY_ACC = 21 - """Array accessor variant""" + LEFT_SHIFT = 12 + """Left Shift operation variant""" - TUPLE_ACC = 22 - """Tuple accessor variant""" + RIGHT_SHIFT = 13 + """Right Shift operation variant""" - NEW = 23 - """New operation variant""" + TRUNC_PR = 14 + """Probabilistic truncation operation variant""" - RANDOM = 24 - """Random operation variant""" + NOT_EQUALS = 15 + """Not equals operation""" - IF_ELSE = 25 - """IfElse operation variant""" + BOOL_AND = 16 + """Boolean AND operation variant""" - REVEAL = 26 - """Reveal operation variant""" + BOOL_OR = 17 + """Boolean OR operation variant""" - NOT = 27 - """Not operation variant""" + BOOL_XOR = 18 + """Boolean XOR operation variant""" - LEFT_SHIFT = 28 - """Left Shift operation variant""" + ZIP = 19 + """Zip operation variant""" - RIGHT_SHIFT = 29 - """Right Shift operation variant""" + INNER_PRODUCT = 20 + """Inner product operation variant""" - TRUNC_PR = 30 - """Probabilistic truncation operation variant""" + ECDSA_SIGN = 21 + """ECDSA sign operation variant""" - INNER_PROD = 31 - """Inner product operation""" - NOT_EQUALS = 32 - """Not equals operation""" +class UnaryOperationVariant(betterproto.Enum): + 
"""The variant of the binary operation.""" - BOOL_AND = 33 - """Boolean AND operation variant""" + UNZIP = 0 + """Unzip operation variant""" - BOOL_OR = 34 - """Boolean OR operation variant""" + REVEAL = 1 + """Reveal operation variant""" - BOOL_XOR = 35 - """Boolean XOR operation variant""" + NOT = 2 + """Not operation variant""" class TupleIndex(betterproto.Enum): @@ -137,26 +106,6 @@ class TupleIndex(betterproto.Enum): """The right element of the tuple""" -@dataclass(eq=False, repr=False) -class OperationDescriptor(betterproto.Message): - """ - The operation descriptor abstracts the base elements that identify any - operation: - - The operation identifier - - The output type of the operation - - The index of the source reference - """ - - id: int = betterproto.uint64_field(1) - """Operation identifier""" - - type: "__types_v1__.NadaType" = betterproto.message_field(2) - """The output type of the operation""" - - source_ref_index: int = betterproto.uint64_field(3) - """Source file info related with this operation.""" - - @dataclass(eq=False, repr=False) class BinaryOperation(betterproto.Message): """ @@ -165,8 +114,8 @@ class BinaryOperation(betterproto.Message): Division, Modulo, Power, etc. """ - op: "OperationDescriptor" = betterproto.message_field(1) - """Operation descriptor""" + variant: "BinaryOperationVariant" = betterproto.enum_field(1) + """Operation variant""" left: int = betterproto.uint64_field(2) """Left operand of the operation""" @@ -185,8 +134,8 @@ class UnaryOperation(betterproto.Message): - Unzip """ - op: "OperationDescriptor" = betterproto.message_field(1) - """Operation descriptor""" + variant: "UnaryOperationVariant" = betterproto.enum_field(1) + """Operation variant""" this: int = betterproto.uint64_field(2) """The operand of the operation""" @@ -194,25 +143,16 @@ class UnaryOperation(betterproto.Message): @dataclass(eq=False, repr=False) class IfElseOperation(betterproto.Message): - op: "OperationDescriptor" = betterproto.message_field(1) - """Operation descriptor""" - - cond: int = betterproto.uint64_field(2) + cond: int = betterproto.uint64_field(1) """operand of the conditional operation""" - first: int = betterproto.uint64_field(3) + first: int = betterproto.uint64_field(2) """operand of the first operation""" - second: int = betterproto.uint64_field(4) + second: int = betterproto.uint64_field(3) """operand of the second operation""" -@dataclass(eq=False, repr=False) -class RandomOperation(betterproto.Message): - op: "OperationDescriptor" = betterproto.message_field(1) - """Operation descriptor""" - - @dataclass(eq=False, repr=False) class InputReference(betterproto.Message): """ @@ -222,95 +162,98 @@ class InputReference(betterproto.Message): Also, it is used to describe the nada function arguments. 
""" - op: "OperationDescriptor" = betterproto.message_field(1) - """Operation descriptor""" - - refers_to: int = betterproto.uint64_field(2) + refers_to: str = betterproto.string_field(1) """Index of the input/literal operation referred by this operation""" @dataclass(eq=False, repr=False) -class MapOperation(betterproto.Message): - op: "OperationDescriptor" = betterproto.message_field(1) - """Operation descriptor""" +class LiteralReference(betterproto.Message): + refers_to: str = betterproto.string_field(1) + """Literal refers to""" + - fn: int = betterproto.uint64_field(2) +@dataclass(eq=False, repr=False) +class MapOperation(betterproto.Message): + fn: int = betterproto.uint64_field(1) """Function to execute""" - inner: int = betterproto.uint64_field(3) - """Map operation input""" + child: int = betterproto.uint64_field(2) + """Map operation child""" @dataclass(eq=False, repr=False) class ReduceOperation(betterproto.Message): - op: "OperationDescriptor" = betterproto.message_field(1) - """Operation descriptor""" - - fn: int = betterproto.uint64_field(2) + fn: int = betterproto.uint64_field(1) """Function to execute""" - inner: int = betterproto.uint64_field(3) - """Map operation input""" + child: int = betterproto.uint64_field(2) + """Reduce operation child""" - initial: int = betterproto.uint64_field(4) + initial: int = betterproto.uint64_field(3) """Initial accumulator value""" @dataclass(eq=False, repr=False) class NewOperation(betterproto.Message): - op: "OperationDescriptor" = betterproto.message_field(1) - """Operation descriptor""" - - elements: List[int] = betterproto.uint64_field(2) + elements: List[int] = betterproto.uint64_field(1) """The elements of this compound type""" @dataclass(eq=False, repr=False) class ArrayAccessor(betterproto.Message): - op: "OperationDescriptor" = betterproto.message_field(1) - """Operation descriptor""" - - index: int = betterproto.uint32_field(2) + index: int = betterproto.uint32_field(1) """ array index - for now an integer but eventually it could be the result of an operation """ - source: int = betterproto.uint64_field(3) + source: int = betterproto.uint64_field(2) """source - The Operation that represents the array we are accessing""" @dataclass(eq=False, repr=False) class TupleAccessor(betterproto.Message): - op: "OperationDescriptor" = betterproto.message_field(1) - """Operation descriptor""" + index: "TupleIndex" = betterproto.enum_field(1) + """tuple index (left or right)""" + + source: int = betterproto.uint64_field(2) + """source - The Operation that represents the tuple we are accessing""" + - index: "TupleIndex" = betterproto.enum_field(2) +@dataclass(eq=False, repr=False) +class NtupleAccessor(betterproto.Message): + index: int = betterproto.uint32_field(1) """tuple index (left or right)""" - source: int = betterproto.uint64_field(3) + source: int = betterproto.uint64_field(2) """source - The Operation that represents the tuple we are accessing""" @dataclass(eq=False, repr=False) -class NadaFunctionArgRef(betterproto.Message): - arg: "InputReference" = betterproto.message_field(1) - """The input reference for this argument""" +class ObjectAccessor(betterproto.Message): + key: str = betterproto.string_field(1) + """object key""" - function_id: int = betterproto.uint64_field(2) - """Function owner of this argument""" + source: int = betterproto.uint64_field(2) + """source - The Operation that represents the object we are accessing""" @dataclass(eq=False, repr=False) -class NadaFunctionCall(betterproto.Message): - op: 
"OperationDescriptor" = betterproto.message_field(1) - """Operation descriptor""" +class NadaFunctionArgRef(betterproto.Message): + function_id: int = betterproto.uint64_field(1) + """Function owner of this argument""" + + refers_to: str = betterproto.string_field(2) + """Refers to the argument""" - function_id: int = betterproto.uint64_field(2) - """Function owner of this call""" - args: List[int] = betterproto.uint64_field(3) - """Arguments of the call""" +@dataclass(eq=False, repr=False) +class CastOperation(betterproto.Message): + target: int = betterproto.uint64_field(1) + """The operand of the operation""" + + cast_to: "__types_v1__.NadaType" = betterproto.message_field(2) + """The type to cast to""" @dataclass(eq=False, repr=False) @@ -321,16 +264,29 @@ class Operation(betterproto.Message): - The operation variant """ - id: "OperationVariant" = betterproto.enum_field(1) - binary: "BinaryOperation" = betterproto.message_field(2, group="operation") - unary: "UnaryOperation" = betterproto.message_field(3, group="operation") - ifelse: "IfElseOperation" = betterproto.message_field(4, group="operation") - random: "RandomOperation" = betterproto.message_field(5, group="operation") - input: "InputReference" = betterproto.message_field(6, group="operation") - map: "MapOperation" = betterproto.message_field(7, group="operation") - reduce: "ReduceOperation" = betterproto.message_field(8, group="operation") - new: "NewOperation" = betterproto.message_field(9, group="operation") - array_accessor: "ArrayAccessor" = betterproto.message_field(10, group="operation") - tuple_accessor: "TupleAccessor" = betterproto.message_field(11, group="operation") - arg: "NadaFunctionArgRef" = betterproto.message_field(12, group="operation") - call: "NadaFunctionCall" = betterproto.message_field(13, group="operation") + id: int = betterproto.uint64_field(1) + """Operation identifier""" + + type: "__types_v1__.NadaType" = betterproto.message_field(2) + """The output type of the operation""" + + source_ref_index: int = betterproto.uint64_field(3) + """Source file info related with this operation.""" + + binary: "BinaryOperation" = betterproto.message_field(4, group="operation") + unary: "UnaryOperation" = betterproto.message_field(5, group="operation") + ifelse: "IfElseOperation" = betterproto.message_field(6, group="operation") + random: "betterproto_lib_google_protobuf.Empty" = betterproto.message_field( + 7, group="operation" + ) + input_ref: "InputReference" = betterproto.message_field(8, group="operation") + literal_ref: "LiteralReference" = betterproto.message_field(9, group="operation") + arg_ref: "NadaFunctionArgRef" = betterproto.message_field(10, group="operation") + map: "MapOperation" = betterproto.message_field(11, group="operation") + reduce: "ReduceOperation" = betterproto.message_field(12, group="operation") + new: "NewOperation" = betterproto.message_field(13, group="operation") + array_accessor: "ArrayAccessor" = betterproto.message_field(14, group="operation") + tuple_accessor: "TupleAccessor" = betterproto.message_field(15, group="operation") + ntuple_accessor: "NtupleAccessor" = betterproto.message_field(16, group="operation") + object_accessor: "ObjectAccessor" = betterproto.message_field(17, group="operation") + cast: "CastOperation" = betterproto.message_field(18, group="operation") diff --git a/nada_mir/src/nada_mir_proto/nillion/nada/types/v1/__init__.py b/nada_mir/src/nada_mir_proto/nillion/nada/types/v1/__init__.py index 1bcbff0..6336b51 100644 --- 
a/nada_mir/src/nada_mir_proto/nillion/nada/types/v1/__init__.py +++ b/nada_mir/src/nada_mir_proto/nillion/nada/types/v1/__init__.py @@ -4,52 +4,18 @@ # This file has been @generated from dataclasses import dataclass -from typing import ( - Dict, - List, -) +from typing import List import betterproto - - -class ScalarType(betterproto.Enum): - """ - The Scalar Type enumeration.enum - List of the different scalar types in Nada. - """ - - INTEGER = 0 - UNSIGNED_INTEGER = 1 - BOOLEAN = 2 - SECRET_INTEGER = 3 - SECRET_UNSIGNED_INTEGER = 4 - SECRET_BOOLEAN = 5 - SECRET_BLOB = 6 - SECRET_NON_ZERO_INTEGER = 7 - SECRET_NON_ZERO_UNSIGNED_INTEGER = 8 - ECDSA_PRIVATE_KEY = 9 - """ECDSA private key for the threshold ecdsa signature feature.""" - - ECDSA_DIGEST_MESSAGE = 10 - """Public ECDSA message digest.""" - - -@dataclass(eq=False, repr=False) -class Object(betterproto.Message): - """Object: key-value hash map.""" - - types: Dict[str, "NadaType"] = betterproto.map_field( - 1, betterproto.TYPE_STRING, betterproto.TYPE_MESSAGE - ) - """/ Key-value types.""" +import betterproto.lib.google.protobuf as betterproto_lib_google_protobuf @dataclass(eq=False, repr=False) class Array(betterproto.Message): """Array type, defines a collection of homogeneous values""" - inner_type: "NadaType" = betterproto.message_field(1) - """Inner type of the elements of this array""" + contained_type: "NadaType" = betterproto.message_field(1) + """Type of the elements of this array""" size: int = betterproto.uint32_field(2) """Size of the array""" @@ -67,22 +33,58 @@ class Tuple(betterproto.Message): @dataclass(eq=False, repr=False) -class NTuple(betterproto.Message): - """NTuple: any number of heterogeneous values.""" +class Ntuple(betterproto.Message): + fields: List["NadaType"] = betterproto.message_field(1) + """Number of elements in the tuple""" + - types: List["NadaType"] = betterproto.message_field(1) - """NTuple types.""" +@dataclass(eq=False, repr=False) +class ObjectEntry(betterproto.Message): + name: str = betterproto.string_field(1) + """Name of the field""" + + type: "NadaType" = betterproto.message_field(2) + """Type of the field""" @dataclass(eq=False, repr=False) -class CompositeType(betterproto.Message): - array: "Array" = betterproto.message_field(1, group="composite_type") - tuple: "Tuple" = betterproto.message_field(2, group="composite_type") - ntuple: "NTuple" = betterproto.message_field(3, group="composite_type") - object: "Object" = betterproto.message_field(4, group="composite_type") +class Object(betterproto.Message): + fields: List["ObjectEntry"] = betterproto.message_field(1) + """ + Fields of the object, we use repeated ObjectEntry ordered by name to have deterministic compilation + """ @dataclass(eq=False, repr=False) class NadaType(betterproto.Message): - scalar: "ScalarType" = betterproto.enum_field(1, group="nada_type") - composite: "CompositeType" = betterproto.message_field(2, group="nada_type") + integer: "betterproto_lib_google_protobuf.Empty" = betterproto.message_field( + 1, group="nada_type" + ) + unsigned_integer: "betterproto_lib_google_protobuf.Empty" = ( + betterproto.message_field(2, group="nada_type") + ) + boolean: "betterproto_lib_google_protobuf.Empty" = betterproto.message_field( + 3, group="nada_type" + ) + secret_integer: "betterproto_lib_google_protobuf.Empty" = betterproto.message_field( + 4, group="nada_type" + ) + secret_unsigned_integer: "betterproto_lib_google_protobuf.Empty" = ( + betterproto.message_field(5, group="nada_type") + ) + secret_boolean: 
"betterproto_lib_google_protobuf.Empty" = betterproto.message_field( + 6, group="nada_type" + ) + ecdsa_private_key: "betterproto_lib_google_protobuf.Empty" = ( + betterproto.message_field(7, group="nada_type") + ) + ecdsa_digest_message: "betterproto_lib_google_protobuf.Empty" = ( + betterproto.message_field(8, group="nada_type") + ) + ecdsa_signature: "betterproto_lib_google_protobuf.Empty" = ( + betterproto.message_field(9, group="nada_type") + ) + array: "Array" = betterproto.message_field(10, group="nada_type") + tuple: "Tuple" = betterproto.message_field(11, group="nada_type") + ntuple: "Ntuple" = betterproto.message_field(12, group="nada_type") + object: "Object" = betterproto.message_field(13, group="nada_type") diff --git a/pyproject.toml b/pyproject.toml index 5a2cb22..429fd7b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,6 +14,8 @@ dependencies = [ "parsial~=0.1", "sortedcontainers~=2.4", "typing_extensions~=4.12.2", + "nada-mir-proto==0.1.0", + "types-protobuf~=5.29" ] classifiers = ["License :: OSI Approved :: Apache Software License"] license = { file = "LICENSE" } diff --git a/tests/compile_test.py b/tests/compile_test.py index 3045d29..68806d5 100644 --- a/tests/compile_test.py +++ b/tests/compile_test.py @@ -6,18 +6,21 @@ import os import json import pytest -from nada_dsl.ast_util import AST_OPERATIONS +from betterproto.lib.google.protobuf import Empty + +from nada_mir_proto.nillion.nada.mir import v1 as proto_mir +from nada_mir_proto.nillion.nada.types import v1 as proto_ty +from nada_mir_proto.nillion.nada.operations import v1 as proto_op + +from nada_dsl.ast_util import AST_OPERATIONS, OperationId from nada_dsl.compile import compile_script, compile_string, print_output -from nada_dsl.compiler_frontend import FUNCTIONS, INPUTS, PARTIES from nada_dsl.errors import NotAllowedException @pytest.fixture(autouse=True) def clean_inputs(): - PARTIES.clear() - INPUTS.clear() - FUNCTIONS.clear() AST_OPERATIONS.clear() + OperationId.reset() yield @@ -30,9 +33,9 @@ def get_test_programs_folder(): def test_compile_sum_integers(): - mir_str = compile_script(f"{get_test_programs_folder()}/sum_integers.py").mir - assert mir_str != "" - mir = json.loads(mir_str) + mir_bytes = compile_script(f"{get_test_programs_folder()}/sum_integers.py").mir + assert len(mir_bytes) > 0 + mir = proto_mir.ProgramMir().parse(mir_bytes) # The MIR operations look like this: # - 2 InputReference # - 1 LiteralReference for the initial accumulator @@ -41,18 +44,21 @@ def test_compile_sum_integers(): literal_id = 0 input_ids = [] additions = {} - for operation in mir["operations"].values(): - for name, op in operation.items(): - op_id = op["id"] - if name == "LiteralReference": - literal_id = op_id - assert op["type"] == "Integer" - elif name == "InputReference": - input_ids.append(op_id) - elif name == "Addition": - additions[op_id] = op - else: - raise Exception(f"Unexpected operation: {name}") + for entry in mir.operations: + op_id, operation = entry.id, entry.operation + if hasattr(operation, "literal_ref"): + literal_id = op_id + assert operation.type == proto_ty.NadaType(integer=Empty()) + elif hasattr(operation, "input_ref"): + input_ids.append(op_id) + elif ( + hasattr(operation, "binary") + and operation.binary.variant == proto_op.BinaryOperationVariant.ADDITION + ): + additions[op_id] = operation.binary + else: + raise Exception(f"Unexpected operation: {operation}") + assert literal_id != 0 assert len(input_ids) == 2 assert len(additions) == 2 @@ -62,8 +68,8 @@ def 
test_compile_sum_integers(): # left: addition, right: input reference second_addition_found = False for addition in additions.values(): - left_id = addition["left"] - right_id = addition["right"] + left_id = addition.left + right_id = addition.right if left_id in input_ids and right_id == literal_id: first_addition_found = True if left_id in additions.keys() and right_id in input_ids: @@ -95,35 +101,42 @@ def add_times(a: SecretInteger, b: SecretInteger) -> SecretInteger: def test_compile_map_simple(): - mir_str = compile_script(f"{get_test_programs_folder()}/map_simple.py").mir - assert mir_str != "" - mir = json.loads(mir_str) - assert len(mir["operations"]) == 2 - assert len(mir["functions"]) == 1 - function_id = mir["functions"][0]["id"] + mir_bytes = compile_script(f"{get_test_programs_folder()}/map_simple.py").mir + assert len(mir_bytes) > 0 + mir = proto_mir.ProgramMir().parse(mir_bytes) + + assert len(mir.operations) == 2 + assert len(mir.functions) == 1 + function_id = mir.functions[0].id operations_found = 0 array_input_id = 0 map_inner = 0 - output_id = mir["outputs"][0]["operation_id"] + output_id = mir.outputs[0].operation_id function_op_id = 0 - for operation in mir["operations"].values(): - for name, op in operation.items(): - op_id = op["id"] - if name == "InputReference": - array_input_id = op_id - assert op["type"] == { - "Array": {"inner_type": "SecretInteger", "size": 3} - } - operations_found += 1 - elif name == "Map": - assert op["fn"] == function_id - map_inner = op["inner"] - function_op_id = op["id"] - operations_found += 1 - else: - raise Exception(f"Unexpected operation: {name}") - assert map_inner > 0 and array_input_id > 0 and map_inner == array_input_id - assert function_op_id > 0 and output_id == function_op_id + for entry in mir.operations: + op_id, operation = entry.id, entry.operation + if hasattr(operation, "input_ref"): + array_input_id = op_id + assert operation.type == proto_ty.NadaType( + array=proto_ty.Array( + size=3, + contained_type=proto_ty.NadaType(secret_integer=Empty()), + ) + ) + + operations_found += 1 + elif hasattr(operation, "map"): + assert operation.map.fn == function_id + map_inner = operation.map.child + function_op_id = op_id + operations_found += 1 + else: + raise Exception(f"Unexpected operation: {operation}") + + assert map_inner > -1 + assert array_input_id > -1 + assert map_inner == array_input_id + assert 0 < function_op_id == output_id def test_compile_ecdsa_program(): @@ -139,15 +152,15 @@ def nada_main(): return [Output(new_int, "my_output", party1)] """ encoded_program_str = base64.b64encode(bytes(program_str, "utf-8")).decode("utf_8") - output = compile_string(encoded_program_str) - print_output(output) + mir_bytes = compile_string(encoded_program_str).mir + assert len(mir_bytes) > 0 def test_compile_ntuple(): - mir_str = compile_script(f"{get_test_programs_folder()}/ntuple_accessor.py").mir - assert mir_str != "" + mir_bytes = compile_script(f"{get_test_programs_folder()}/ntuple_accessor.py").mir + assert len(mir_bytes) > 0 def test_compile_object(): mir_str = compile_script(f"{get_test_programs_folder()}/object_accessor.py").mir - assert mir_str != "" + assert len(mir_str) > 0 diff --git a/tests/compiler_frontend_test.py b/tests/compiler_frontend_test.py index 5b664d0..3a3f6c0 100644 --- a/tests/compiler_frontend_test.py +++ b/tests/compiler_frontend_test.py @@ -7,6 +7,11 @@ import operator from typing import Any import pytest +from betterproto.lib.google.protobuf import Empty + +from 
nada_mir_proto.nillion.nada.operations import v1 as proto_op +from nada_mir_proto.nillion.nada.types import v1 as proto_ty + from nada_dsl.ast_util import ( AST_OPERATIONS, BinaryASTOperation, @@ -15,6 +20,7 @@ NadaFunctionASTOperation, ReduceASTOperation, UnaryASTOperation, + OperationId, ) # pylint: disable=wildcard-import,unused-wildcard-import @@ -23,27 +29,18 @@ from nada_dsl.compiler_frontend import ( nada_dsl_to_nada_mir, to_input_list, - process_operation, - INPUTS, - PARTIES, - FUNCTIONS, - traverse_and_process_operations, + process_operation, CompilationContext, ) from nada_dsl.nada_types import AllTypes, Party from nada_dsl.nada_types.collections import Array, Tuple, NTuple, Object, unzip -from nada_dsl.nada_types.function import ( - NadaFunctionArg, - NadaFunctionCall, - create_nada_fn, -) +from nada_dsl.nada_types.function import NadaFunctionArg +from tests.scalar_type_test import secret_integers @pytest.fixture(autouse=True) def clean_inputs(): - PARTIES.clear() - INPUTS.clear() - FUNCTIONS.clear() AST_OPERATIONS.clear() + OperationId.reset() yield @@ -87,31 +84,32 @@ def test_root_conversion(): input = create_input(SecretInteger, "input", "input_party") output = create_output(input, "output", "output_party") mir = nada_dsl_to_nada_mir([output]) - assert len(mir["parties"]) == 2 - assert len(mir["inputs"]) == 1 - assert len(mir["literals"]) == 0 - assert len(mir["outputs"]) == 1 - assert "source_files" in mir + assert len(mir.parties) == 2 + assert len(mir.inputs) == 1 + assert len(mir.literals) == 0 + assert len(mir.outputs) == 1 - operations = mir["operations"] - mir_output = mir["outputs"][0] - assert mir_output["name"] == "output" - assert mir_output["type"] == "SecretInteger" - assert mir_output["party"] == "output_party" + operations = mir.operations + mir_output = mir.outputs[0] + assert mir_output.name == "output" + assert mir_output.type == proto_ty.NadaType(secret_integer=Empty()) + assert mir_output.party == "output_party" - assert list(operations[mir_output["operation_id"]].keys()) == ["InputReference"] + assert list(filter(lambda op: op.id == mir_output.operation_id, operations))[0].operation.input_ref def test_input_conversion(): input = Input(name="input", party=Party("party")) - inputs = {"party": {"input": (input, "SecretInteger")}} + input.store_in_ast(proto_ty.NadaType(secret_integer=Empty())) + inputs = { 0: AST_OPERATIONS[0] } + converted_inputs = to_input_list(inputs) assert len(converted_inputs) == 1 converted = converted_inputs[0] - assert converted["name"] == "input" - assert converted["party"] == "party" - assert converted["type"] == "SecretInteger" + assert converted.name == "input" + assert converted.party == "party" + assert converted.type == proto_ty.NadaType(secret_integer=Empty()) def test_duplicated_inputs_checks(): @@ -130,7 +128,7 @@ def test_array_type_conversion(input_type, type_name, size): inner_input = create_input(SecretInteger, "name", "party", **{}) collection = create_collection(input_type, inner_input, size, **{}) converted_input = collection.type().to_mir() - assert list(converted_input.keys()) == [type_name] + assert converted_input.array @pytest.mark.parametrize( @@ -147,21 +145,21 @@ def test_zip(input_type, input_name): zipped = left.zip(right) assert isinstance(zipped, Array) zip_ast = AST_OPERATIONS[zipped.child.id] - op = process_operation(zip_ast, {}).mir - assert list(op.keys()) == ["Zip"] + op = process_operation(zip_ast, CompilationContext()) + assert op.binary.variant == proto_op.BinaryOperationVariant.ZIP - zip_mir = 
op["Zip"] + zip_mir = op - left = AST_OPERATIONS[zip_mir["left"]] - right = AST_OPERATIONS[zip_mir["right"]] + left = AST_OPERATIONS[op.binary.left] + right = AST_OPERATIONS[op.binary.right] assert left.name == "left" assert right.name == "right" - assert zip_mir["type"][input_name]["inner_type"] == { - "Tuple": { - "left_type": "SecretInteger", - "right_type": "SecretInteger", - } - } + assert zip_mir.type.array.contained_type.tuple.right == proto_ty.NadaType( + secret_integer=Empty() + ) + assert zip_mir.type.array.contained_type.tuple.left == proto_ty.NadaType( + secret_integer=Empty() + ) @pytest.mark.parametrize( @@ -177,21 +175,27 @@ def test_unzip(input_type: type[Array]): assert isinstance(unzipped, Tuple) unzip_ast = AST_OPERATIONS[unzipped.child.id] assert isinstance(unzip_ast, UnaryASTOperation) - assert unzip_ast.name == "Unzip" + assert unzip_ast.variant == proto_op.UnaryOperationVariant.UNZIP - op = process_operation(AST_OPERATIONS[unzipped.child.id], {}).mir + op = process_operation(AST_OPERATIONS[unzipped.child.id], CompilationContext()) - unzip_mir = op["Unzip"] + unzip_mir = op # Check that the child operation points to a Zip - zip_ast = AST_OPERATIONS[unzip_mir["this"]] + zip_ast = AST_OPERATIONS[unzip_mir.unary.this] assert isinstance(zip_ast, BinaryASTOperation) - assert zip_ast.name == "Zip" - assert unzip_mir["type"] == { - "Tuple": { - "left_type": {"Array": {"inner_type": "SecretInteger", "size": 10}}, - "right_type": {"Array": {"inner_type": "SecretInteger", "size": 10}}, - } - } + assert zip_ast.variant == proto_op.BinaryOperationVariant.ZIP + assert unzip_mir.type.tuple.left == proto_ty.NadaType( + array=proto_ty.Array( + contained_type=proto_ty.NadaType(secret_integer=Empty()), + size=10, + ) + ) + assert unzip_mir.type.tuple.right == proto_ty.NadaType( + array=proto_ty.Array( + contained_type=proto_ty.NadaType(secret_integer=Empty()), + size=10, + ) + ) @pytest.mark.parametrize( @@ -209,16 +213,15 @@ def nada_function(a: SecretInteger) -> SecretInteger: left = create_collection(input_type, inner_input, 10, **{}) map_operation = left.map(nada_function) - process_output = process_operation(AST_OPERATIONS[map_operation.child.id], {}) - op = process_output.mir - extra_fn = process_output.extra_function - assert list(op.keys()) == ["Map"] - child = op["Map"] - assert child["fn"] == extra_fn.id - assert list(child["type"].keys()) == [input_name] - inner_inner = AST_OPERATIONS[child["inner"]] + ctx = CompilationContext() + op = process_operation(AST_OPERATIONS[map_operation.child.id], ctx) + extra_fn = list(ctx.functions.values())[0] + assert op.map + assert op.map.fn == extra_fn.id + assert op.type.array + assert op.type.array.contained_type == proto_ty.NadaType(secret_integer=Empty()) + inner_inner = AST_OPERATIONS[op.map.child] assert inner_inner.name == "child" - assert child["type"][input_name]["inner_type"] == "SecretInteger" @pytest.mark.parametrize( @@ -233,23 +236,22 @@ def test_reduce(input_type: type[Array]): def nada_function(a: SecretInteger, b: SecretInteger) -> SecretInteger: return a + b - inner_input = create_input(SecretInteger, "child", "party", **{}) + inner_input = create_input(SecretInteger, "input", "party", **{}) left = create_collection(input_type, inner_input, 10, **{}) reduce_operation = left.reduce(nada_function, c) reduce_ast = AST_OPERATIONS[reduce_operation.child.id] assert isinstance(reduce_ast, ReduceASTOperation) - process_output = process_operation(reduce_ast, {}) - op = process_output.mir - extra_fn = process_output.extra_function 
- - assert list(op.keys()) == ["Reduce"] - child = op["Reduce"] - assert child["fn"] == extra_fn.id - assert child["type"] == "SecretInteger" - inner_inner = AST_OPERATIONS[child["inner"]] - assert inner_inner.name == "child" + ctx = CompilationContext() + op = process_operation(reduce_ast, ctx) + extra_fn = list(ctx.functions.values())[0] + + assert op.reduce + assert op.reduce.fn == extra_fn.id + assert op.type == proto_ty.NadaType(secret_integer=Empty()) + inner_inner = AST_OPERATIONS[op.reduce.child] + assert inner_inner.name == "input" def check_arg(arg: NadaFunctionArg, arg_name, arg_type): @@ -269,21 +271,17 @@ def test_array_new(): second_input = create_input(SecretInteger, "second", "party", **{}) array = Array.new(first_input, second_input) - op = process_operation(AST_OPERATIONS[array.child.id], {}).mir + op = process_operation(AST_OPERATIONS[array.child.id], CompilationContext()) - assert list(op.keys()) == ["New"] + assert op.new - child = op["New"] - - first: InputASTOperation = AST_OPERATIONS[child["elements"][0]] # type: ignore - second: InputASTOperation = AST_OPERATIONS[child["elements"][1]] # type: ignore + first: InputASTOperation = AST_OPERATIONS[op.new.elements[0]] # type: ignore + second: InputASTOperation = AST_OPERATIONS[op.new.elements[1]] # type: ignore assert first.name == "first" assert second.name == "second" - assert child["type"]["Array"] == { - "inner_type": "SecretInteger", - "size": 2, - } + assert op.type.array.contained_type == proto_ty.NadaType(secret_integer=Empty()) + assert op.type.array.size == 2 def test_array_new_empty(): @@ -307,20 +305,16 @@ def test_tuple_new(): tuple = Tuple.new(first_input, second_input) array_ast = AST_OPERATIONS[tuple.child.id] - op = process_operation(array_ast, {}).mir - - assert list(op.keys()) == ["New"] + op = process_operation(array_ast, CompilationContext()) - child = op["New"] + assert op.new - left_ast = AST_OPERATIONS[child["elements"][0]] - right_ast = AST_OPERATIONS[child["elements"][1]] + left_ast = AST_OPERATIONS[op.new.elements[0]] + right_ast = AST_OPERATIONS[op.new.elements[1]] assert left_ast.name == "first" assert right_ast.name == "second" - assert child["type"]["Tuple"] == { - "left_type": "SecretInteger", - "right_type": "Integer", - } + assert op.type.tuple.left == proto_ty.NadaType(secret_integer=Empty()) + assert op.type.tuple.right == proto_ty.NadaType(integer=Empty()) def test_tuple_new_empty(): @@ -339,22 +333,21 @@ def test_n_tuple_new(): tuple = NTuple.new([first_input, second_input, third_input]) array_ast = AST_OPERATIONS[tuple.child.id] - op = process_operation(array_ast, {}).mir + op = process_operation(array_ast, CompilationContext()) - assert list(op.keys()) == ["New"] + assert op.new - child = op["New"] - - first_ast = AST_OPERATIONS[child["elements"][0]] - second_ast = AST_OPERATIONS[child["elements"][1]] - third_ast = AST_OPERATIONS[child["elements"][2]] + first_ast = AST_OPERATIONS[op.new.elements[0]] + second_ast = AST_OPERATIONS[op.new.elements[1]] + third_ast = AST_OPERATIONS[op.new.elements[2]] assert first_ast.name == "first" assert second_ast.name == "second" assert third_ast.name == "third" - print(f"child = {child}") - assert child["type"]["NTuple"] == { - "types": ["SecretInteger", "Integer", "SecretInteger"], - } + assert op.type.ntuple.fields == [ + proto_ty.NadaType(secret_integer=Empty()), + proto_ty.NadaType(integer=Empty()), + proto_ty.NadaType(secret_integer=Empty()), + ] def test_object_new(): @@ -364,84 +357,127 @@ def test_object_new(): object = Object.new({"a": 
first_input, "b": second_input, "c": third_input}) array_ast = AST_OPERATIONS[object.child.id] - op = process_operation(array_ast, {}).mir - - assert list(op.keys()) == ["New"] + op = process_operation(array_ast, CompilationContext()) - child = op["New"] + assert op.new - first_ast = AST_OPERATIONS[child["elements"][0]] - second_ast = AST_OPERATIONS[child["elements"][1]] - third_ast = AST_OPERATIONS[child["elements"][2]] + first_ast = AST_OPERATIONS[op.new.elements[0]] + second_ast = AST_OPERATIONS[op.new.elements[1]] + third_ast = AST_OPERATIONS[op.new.elements[2]] assert first_ast.name == "first" assert second_ast.name == "second" assert third_ast.name == "third" - print(f"child = {child}") - assert child["type"]["Object"] == { - "types": {"a": "SecretInteger", "b": "Integer", "c": "SecretInteger"}, - } + + assert op.type.object.fields == [ + proto_ty.ObjectEntry(name="a", type=proto_ty.NadaType(secret_integer=Empty())), + proto_ty.ObjectEntry(name="b", type=proto_ty.NadaType(integer=Empty())), + proto_ty.ObjectEntry(name="c", type=proto_ty.NadaType(secret_integer=Empty())), + ] @pytest.mark.parametrize( - ("binary_operator", "name", "ty"), + ("binary_operator", "ty"), [ - (operator.add, "LiteralReference", "Integer"), - (operator.sub, "LiteralReference", "Integer"), - (operator.mul, "LiteralReference", "Integer"), - (operator.truediv, "LiteralReference", "Integer"), - (operator.mod, "LiteralReference", "Integer"), - (operator.pow, "LiteralReference", "Integer"), - (operator.lt, "LiteralReference", "Boolean"), - (operator.gt, "LiteralReference", "Boolean"), - (operator.le, "LiteralReference", "Boolean"), - (operator.ge, "LiteralReference", "Boolean"), - (operator.eq, "LiteralReference", "Boolean"), + (operator.add, proto_ty.NadaType(integer=Empty())), + (operator.sub, proto_ty.NadaType(integer=Empty())), + (operator.mul, proto_ty.NadaType(integer=Empty())), + (operator.truediv, proto_ty.NadaType(integer=Empty())), + (operator.mod, proto_ty.NadaType(integer=Empty())), + (operator.pow, proto_ty.NadaType(integer=Empty())), + (operator.lt, proto_ty.NadaType(boolean=Empty())), + (operator.gt, proto_ty.NadaType(boolean=Empty())), + (operator.le, proto_ty.NadaType(boolean=Empty())), + (operator.ge, proto_ty.NadaType(boolean=Empty())), + (operator.eq, proto_ty.NadaType(boolean=Empty())), ], ) -def test_binary_operator_integer_integer(binary_operator, name, ty): +def test_binary_operator_integer_integer(binary_operator, ty): left = create_literal(Integer, -2) right = create_literal(Integer, -2) program_operation = binary_operator(left, right) # recover operation from AST ast_operation = AST_OPERATIONS[program_operation.child.id] - op = process_operation(ast_operation, {}).mir - assert list(op.keys()) == [name] - child = op[name] - assert child["type"] == to_mir(ty) + op = process_operation(ast_operation, CompilationContext()) + assert op.literal_ref + assert op.type == ty @pytest.mark.parametrize( - ("operator", "name", "ty"), + ("operator", "variant", "ty"), [ - (operator.add, "Addition", "PublicInteger"), - (operator.sub, "Subtraction", "PublicInteger"), - (operator.mul, "Multiplication", "PublicInteger"), - (operator.truediv, "Division", "PublicInteger"), - (operator.mod, "Modulo", "PublicInteger"), - (operator.pow, "Power", "PublicInteger"), - (operator.lt, "LessThan", "PublicBoolean"), - (operator.gt, "GreaterThan", "PublicBoolean"), - (operator.le, "LessOrEqualThan", "PublicBoolean"), - (operator.ge, "GreaterOrEqualThan", "PublicBoolean"), - (operator.eq, "Equals", "PublicBoolean"), + ( + 
operator.add, + proto_op.BinaryOperationVariant.ADDITION, + proto_ty.NadaType(integer=Empty()), + ), + ( + operator.sub, + proto_op.BinaryOperationVariant.SUBTRACTION, + proto_ty.NadaType(integer=Empty()), + ), + ( + operator.mul, + proto_op.BinaryOperationVariant.MULTIPLICATION, + proto_ty.NadaType(integer=Empty()), + ), + ( + operator.truediv, + proto_op.BinaryOperationVariant.DIVISION, + proto_ty.NadaType(integer=Empty()), + ), + ( + operator.mod, + proto_op.BinaryOperationVariant.MODULO, + proto_ty.NadaType(integer=Empty()), + ), + ( + operator.pow, + proto_op.BinaryOperationVariant.POWER, + proto_ty.NadaType(integer=Empty()), + ), + ( + operator.lt, + proto_op.BinaryOperationVariant.LESS_THAN, + proto_ty.NadaType(boolean=Empty()), + ), + ( + operator.gt, + proto_op.BinaryOperationVariant.GREATER_THAN, + proto_ty.NadaType(boolean=Empty()), + ), + ( + operator.le, + proto_op.BinaryOperationVariant.LESS_EQ, + proto_ty.NadaType(boolean=Empty()), + ), + ( + operator.ge, + proto_op.BinaryOperationVariant.GREATER_EQ, + proto_ty.NadaType(boolean=Empty()), + ), + ( + operator.eq, + proto_op.BinaryOperationVariant.EQUALS, + proto_ty.NadaType(boolean=Empty()), + ), ], ) -def test_binary_operator_integer_publicinteger(operator, name, ty): +def test_binary_operator_integer_publicinteger(operator, variant, ty): left = create_literal(Integer, -3) right = create_input(PublicInteger, "right", "party") program_operation = operator(left, right) # recover operation from AST ast_operation = AST_OPERATIONS[program_operation.child.id] - op = process_operation(ast_operation, {}).mir - assert list(op.keys()) == [name] - child = op[name] - left_ast = AST_OPERATIONS[child["left"]] - right_ast = AST_OPERATIONS[child["right"]] + op = process_operation(ast_operation, CompilationContext()) + assert op.binary.variant == variant + + left_ast = AST_OPERATIONS[op.binary.left] + right_ast = AST_OPERATIONS[op.binary.right] assert isinstance(left_ast, LiteralASTOperation) assert left_ast.value == -3 assert isinstance(right_ast, InputASTOperation) assert right_ast.name == "right" - assert child["type"] == to_mir(ty) + assert op.type == ty def test_logical_operations(): @@ -473,14 +509,14 @@ def test_not(): bool1 = SecretBoolean(Input(name="my_bool_1", party=party1)) operation = ~bool1 ast = AST_OPERATIONS[operation.child.id] - op = process_operation(ast, {}).mir - assert list(op.keys()) == ["Not"] + op = process_operation(ast, CompilationContext()) + assert op.unary.variant == proto_op.UnaryOperationVariant.NOT bool1 = PublicBoolean(Input(name="my_bool_1", party=party1)) operation = ~bool1 ast = AST_OPERATIONS[operation.child.id] - op = process_operation(ast, {}).mir - assert list(op.keys()) == ["Not"] + op = process_operation(ast, CompilationContext()) + assert op.unary.variant == proto_op.UnaryOperationVariant.NOT bool1 = Boolean(True) bool2 = ~bool1 diff --git a/uv.lock b/uv.lock index bc07a9b..1d9eb9f 100644 --- a/uv.lock +++ b/uv.lock @@ -21,6 +21,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7e/b3/6b4067be973ae96ba0d615946e314c5ae35f9f993eca561b356540bb0c2b/alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b", size = 13929 }, ] +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = 
"sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, +] + [[package]] name = "astroid" version = "3.3.5" @@ -192,7 +201,7 @@ name = "click" version = "8.1.7" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "colorama", marker = "platform_system == 'Windows'" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/96/d3/f04c7bfcf5c1862a2a5b845c6b2b360488cf47af55dfa79c98f6a6bf98b5/click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de", size = 336121 } wheels = [ @@ -623,9 +632,11 @@ version = "0.7.1" source = { editable = "." } dependencies = [ { name = "asttokens" }, + { name = "nada-mir-proto" }, { name = "parsial" }, { name = "richreports" }, { name = "sortedcontainers" }, + { name = "types-protobuf" }, { name = "typing-extensions" }, ] @@ -663,6 +674,7 @@ dev = [ [package.metadata] requires-dist = [ { name = "asttokens", specifier = "~=2.4" }, + { name = "nada-mir-proto", editable = "nada_mir" }, { name = "parsial", specifier = "~=0.1" }, { name = "pylint", marker = "extra == 'lint'", specifier = ">=2.17,<3.4" }, { name = "pytest", marker = "extra == 'test'", specifier = ">=7.4,<9.0" }, @@ -673,6 +685,7 @@ requires-dist = [ { name = "sphinx-autoapi", marker = "extra == 'docs'", specifier = "~=3.3.2" }, { name = "sphinx-rtd-theme", marker = "extra == 'docs'", specifier = ">=1.0,<3.1" }, { name = "toml", marker = "extra == 'docs'", specifier = "~=0.10.2" }, + { name = "types-protobuf", specifier = "~=5.29" }, { name = "typing-extensions", specifier = "~=4.12.2" }, ] @@ -699,6 +712,7 @@ source = { editable = "nada_mir" } dependencies = [ { name = "betterproto" }, { name = "grpcio-tools" }, + { name = "pydantic" }, ] [package.optional-dependencies] @@ -711,6 +725,7 @@ requires-dist = [ { name = "betterproto", specifier = "==2.0.0b7" }, { name = "betterproto", extras = ["compiler"], marker = "extra == 'dev'", specifier = "==2.0.0b7" }, { name = "grpcio-tools", specifier = "==1.62.3" }, + { name = "pydantic", specifier = "==2.10.3" }, ] [[package]] @@ -772,6 +787,95 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/33/90/f198a61df8381fb43ae0fe81b3d2718e8dcc51ae8502c7657ab9381fbc4f/protobuf-4.25.5-py3-none-any.whl", hash = "sha256:0aebecb809cae990f8129ada5ca273d9d670b76d9bfc9b1809f0a9c02b7dbf41", size = 156467 }, ] +[[package]] +name = "pydantic" +version = "2.10.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/45/0f/27908242621b14e649a84e62b133de45f84c255eecb350ab02979844a788/pydantic-2.10.3.tar.gz", hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9", size = 786486 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/51/72c18c55cf2f46ff4f91ebcc8f75aa30f7305f3d726be3f4ebffb4ae972b/pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d", size = 456997 }, +] + +[[package]] +name = "pydantic-core" +version = "2.27.1" +source = { registry = "https://pypi.org/simple" 
} +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a6/9f/7de1f19b6aea45aeb441838782d68352e71bfa98ee6fa048d5041991b33e/pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235", size = 412785 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6e/ce/60fd96895c09738648c83f3f00f595c807cb6735c70d3306b548cc96dd49/pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a", size = 1897984 }, + { url = "https://files.pythonhosted.org/packages/fd/b9/84623d6b6be98cc209b06687d9bca5a7b966ffed008d15225dd0d20cce2e/pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b", size = 1807491 }, + { url = "https://files.pythonhosted.org/packages/01/72/59a70165eabbc93b1111d42df9ca016a4aa109409db04304829377947028/pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278", size = 1831953 }, + { url = "https://files.pythonhosted.org/packages/7c/0c/24841136476adafd26f94b45bb718a78cb0500bd7b4f8d667b67c29d7b0d/pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05", size = 1856071 }, + { url = "https://files.pythonhosted.org/packages/53/5e/c32957a09cceb2af10d7642df45d1e3dbd8596061f700eac93b801de53c0/pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4", size = 2038439 }, + { url = "https://files.pythonhosted.org/packages/e4/8f/979ab3eccd118b638cd6d8f980fea8794f45018255a36044dea40fe579d4/pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f", size = 2787416 }, + { url = "https://files.pythonhosted.org/packages/02/1d/00f2e4626565b3b6d3690dab4d4fe1a26edd6a20e53749eb21ca892ef2df/pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08", size = 2134548 }, + { url = "https://files.pythonhosted.org/packages/9d/46/3112621204128b90898adc2e721a3cd6cf5626504178d6f32c33b5a43b79/pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6", size = 1989882 }, + { url = "https://files.pythonhosted.org/packages/49/ec/557dd4ff5287ffffdf16a31d08d723de6762bb1b691879dc4423392309bc/pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807", size = 1995829 }, + { url = "https://files.pythonhosted.org/packages/6e/b2/610dbeb74d8d43921a7234555e4c091cb050a2bdb8cfea86d07791ce01c5/pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c", size = 2091257 }, + { url = "https://files.pythonhosted.org/packages/8c/7f/4bf8e9d26a9118521c80b229291fa9558a07cdd9a968ec2d5c1026f14fbc/pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206", size = 2143894 }, + { url = 
"https://files.pythonhosted.org/packages/1f/1c/875ac7139c958f4390f23656fe696d1acc8edf45fb81e4831960f12cd6e4/pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c", size = 1816081 }, + { url = "https://files.pythonhosted.org/packages/d7/41/55a117acaeda25ceae51030b518032934f251b1dac3704a53781383e3491/pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17", size = 1981109 }, + { url = "https://files.pythonhosted.org/packages/27/39/46fe47f2ad4746b478ba89c561cafe4428e02b3573df882334bd2964f9cb/pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8", size = 1895553 }, + { url = "https://files.pythonhosted.org/packages/1c/00/0804e84a78b7fdb394fff4c4f429815a10e5e0993e6ae0e0b27dd20379ee/pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330", size = 1807220 }, + { url = "https://files.pythonhosted.org/packages/01/de/df51b3bac9820d38371f5a261020f505025df732ce566c2a2e7970b84c8c/pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52", size = 1829727 }, + { url = "https://files.pythonhosted.org/packages/5f/d9/c01d19da8f9e9fbdb2bf99f8358d145a312590374d0dc9dd8dbe484a9cde/pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4", size = 1854282 }, + { url = "https://files.pythonhosted.org/packages/5f/84/7db66eb12a0dc88c006abd6f3cbbf4232d26adfd827a28638c540d8f871d/pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c", size = 2037437 }, + { url = "https://files.pythonhosted.org/packages/34/ac/a2537958db8299fbabed81167d58cc1506049dba4163433524e06a7d9f4c/pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de", size = 2780899 }, + { url = "https://files.pythonhosted.org/packages/4a/c1/3e38cd777ef832c4fdce11d204592e135ddeedb6c6f525478a53d1c7d3e5/pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025", size = 2135022 }, + { url = "https://files.pythonhosted.org/packages/7a/69/b9952829f80fd555fe04340539d90e000a146f2a003d3fcd1e7077c06c71/pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e", size = 1987969 }, + { url = "https://files.pythonhosted.org/packages/05/72/257b5824d7988af43460c4e22b63932ed651fe98804cc2793068de7ec554/pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919", size = 1994625 }, + { url = "https://files.pythonhosted.org/packages/73/c3/78ed6b7f3278a36589bcdd01243189ade7fc9b26852844938b4d7693895b/pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c", size = 2090089 }, + { url = 
"https://files.pythonhosted.org/packages/8d/c8/b4139b2f78579960353c4cd987e035108c93a78371bb19ba0dc1ac3b3220/pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc", size = 2142496 }, + { url = "https://files.pythonhosted.org/packages/3e/f8/171a03e97eb36c0b51981efe0f78460554a1d8311773d3d30e20c005164e/pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9", size = 1811758 }, + { url = "https://files.pythonhosted.org/packages/6a/fe/4e0e63c418c1c76e33974a05266e5633e879d4061f9533b1706a86f77d5b/pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5", size = 1980864 }, + { url = "https://files.pythonhosted.org/packages/50/fc/93f7238a514c155a8ec02fc7ac6376177d449848115e4519b853820436c5/pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89", size = 1864327 }, + { url = "https://files.pythonhosted.org/packages/be/51/2e9b3788feb2aebff2aa9dfbf060ec739b38c05c46847601134cc1fed2ea/pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f", size = 1895239 }, + { url = "https://files.pythonhosted.org/packages/7b/9e/f8063952e4a7d0127f5d1181addef9377505dcce3be224263b25c4f0bfd9/pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02", size = 1805070 }, + { url = "https://files.pythonhosted.org/packages/2c/9d/e1d6c4561d262b52e41b17a7ef8301e2ba80b61e32e94520271029feb5d8/pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c", size = 1828096 }, + { url = "https://files.pythonhosted.org/packages/be/65/80ff46de4266560baa4332ae3181fffc4488ea7d37282da1a62d10ab89a4/pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac", size = 1857708 }, + { url = "https://files.pythonhosted.org/packages/d5/ca/3370074ad758b04d9562b12ecdb088597f4d9d13893a48a583fb47682cdf/pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb", size = 2037751 }, + { url = "https://files.pythonhosted.org/packages/b1/e2/4ab72d93367194317b99d051947c071aef6e3eb95f7553eaa4208ecf9ba4/pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529", size = 2733863 }, + { url = "https://files.pythonhosted.org/packages/8a/c6/8ae0831bf77f356bb73127ce5a95fe115b10f820ea480abbd72d3cc7ccf3/pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35", size = 2161161 }, + { url = "https://files.pythonhosted.org/packages/f1/f4/b2fe73241da2429400fc27ddeaa43e35562f96cf5b67499b2de52b528cad/pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089", size = 1993294 }, + { url = 
"https://files.pythonhosted.org/packages/77/29/4bb008823a7f4cc05828198153f9753b3bd4c104d93b8e0b1bfe4e187540/pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381", size = 2001468 }, + { url = "https://files.pythonhosted.org/packages/f2/a9/0eaceeba41b9fad851a4107e0cf999a34ae8f0d0d1f829e2574f3d8897b0/pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb", size = 2091413 }, + { url = "https://files.pythonhosted.org/packages/d8/36/eb8697729725bc610fd73940f0d860d791dc2ad557faaefcbb3edbd2b349/pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae", size = 2154735 }, + { url = "https://files.pythonhosted.org/packages/52/e5/4f0fbd5c5995cc70d3afed1b5c754055bb67908f55b5cb8000f7112749bf/pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c", size = 1833633 }, + { url = "https://files.pythonhosted.org/packages/ee/f2/c61486eee27cae5ac781305658779b4a6b45f9cc9d02c90cb21b940e82cc/pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16", size = 1986973 }, + { url = "https://files.pythonhosted.org/packages/df/a6/e3f12ff25f250b02f7c51be89a294689d175ac76e1096c32bf278f29ca1e/pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e", size = 1883215 }, + { url = "https://files.pythonhosted.org/packages/0f/d6/91cb99a3c59d7b072bded9959fbeab0a9613d5a4935773c0801f1764c156/pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073", size = 1895033 }, + { url = "https://files.pythonhosted.org/packages/07/42/d35033f81a28b27dedcade9e967e8a40981a765795c9ebae2045bcef05d3/pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08", size = 1807542 }, + { url = "https://files.pythonhosted.org/packages/41/c2/491b59e222ec7e72236e512108ecad532c7f4391a14e971c963f624f7569/pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf", size = 1827854 }, + { url = "https://files.pythonhosted.org/packages/e3/f3/363652651779113189cefdbbb619b7b07b7a67ebb6840325117cc8cc3460/pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737", size = 1857389 }, + { url = "https://files.pythonhosted.org/packages/5f/97/be804aed6b479af5a945daec7538d8bf358d668bdadde4c7888a2506bdfb/pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2", size = 2037934 }, + { url = "https://files.pythonhosted.org/packages/42/01/295f0bd4abf58902917e342ddfe5f76cf66ffabfc57c2e23c7681a1a1197/pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107", size = 2735176 }, + { url = 
"https://files.pythonhosted.org/packages/9d/a0/cd8e9c940ead89cc37812a1a9f310fef59ba2f0b22b4e417d84ab09fa970/pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51", size = 2160720 }, + { url = "https://files.pythonhosted.org/packages/73/ae/9d0980e286627e0aeca4c352a60bd760331622c12d576e5ea4441ac7e15e/pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a", size = 1992972 }, + { url = "https://files.pythonhosted.org/packages/bf/ba/ae4480bc0292d54b85cfb954e9d6bd226982949f8316338677d56541b85f/pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc", size = 2001477 }, + { url = "https://files.pythonhosted.org/packages/55/b7/e26adf48c2f943092ce54ae14c3c08d0d221ad34ce80b18a50de8ed2cba8/pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960", size = 2091186 }, + { url = "https://files.pythonhosted.org/packages/ba/cc/8491fff5b608b3862eb36e7d29d36a1af1c945463ca4c5040bf46cc73f40/pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23", size = 2154429 }, + { url = "https://files.pythonhosted.org/packages/78/d8/c080592d80edd3441ab7f88f865f51dae94a157fc64283c680e9f32cf6da/pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05", size = 1833713 }, + { url = "https://files.pythonhosted.org/packages/83/84/5ab82a9ee2538ac95a66e51f6838d6aba6e0a03a42aa185ad2fe404a4e8f/pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337", size = 1987897 }, + { url = "https://files.pythonhosted.org/packages/df/c3/b15fb833926d91d982fde29c0624c9f225da743c7af801dace0d4e187e71/pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5", size = 1882983 }, + { url = "https://files.pythonhosted.org/packages/7c/60/e5eb2d462595ba1f622edbe7b1d19531e510c05c405f0b87c80c1e89d5b1/pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6", size = 1894016 }, + { url = "https://files.pythonhosted.org/packages/61/20/da7059855225038c1c4326a840908cc7ca72c7198cb6addb8b92ec81c1d6/pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676", size = 1771648 }, + { url = "https://files.pythonhosted.org/packages/8f/fc/5485cf0b0bb38da31d1d292160a4d123b5977841ddc1122c671a30b76cfd/pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d", size = 1826929 }, + { url = "https://files.pythonhosted.org/packages/a1/ff/fb1284a210e13a5f34c639efc54d51da136074ffbe25ec0c279cf9fbb1c4/pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c", size = 1980591 }, + { url = 
"https://files.pythonhosted.org/packages/f1/14/77c1887a182d05af74f6aeac7b740da3a74155d3093ccc7ee10b900cc6b5/pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27", size = 1981326 }, + { url = "https://files.pythonhosted.org/packages/06/aa/6f1b2747f811a9c66b5ef39d7f02fbb200479784c75e98290d70004b1253/pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f", size = 1989205 }, + { url = "https://files.pythonhosted.org/packages/7a/d2/8ce2b074d6835f3c88d85f6d8a399790043e9fdb3d0e43455e72d19df8cc/pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed", size = 2079616 }, + { url = "https://files.pythonhosted.org/packages/65/71/af01033d4e58484c3db1e5d13e751ba5e3d6b87cc3368533df4c50932c8b/pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f", size = 2133265 }, + { url = "https://files.pythonhosted.org/packages/33/72/f881b5e18fbb67cf2fb4ab253660de3c6899dbb2dba409d0b757e3559e3d/pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c", size = 2001864 }, +] + [[package]] name = "pygments" version = "2.18.0" @@ -1121,6 +1225,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f9/b6/a447b5e4ec71e13871be01ba81f5dfc9d0af7e473da256ff46bc0e24026f/tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde", size = 37955 }, ] +[[package]] +name = "types-protobuf" +version = "5.29.1.20241207" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/70/89/b661a447139f665ccea8e39bfdd52a92f803df4b5de0e6001a3537feaacb/types_protobuf-5.29.1.20241207.tar.gz", hash = "sha256:2ebcadb8ab3ef2e3e2f067e0882906d64ba0dc65fc5b0fd7a8b692315b4a0be9", size = 59190 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/6e/cdf152187019d6f6d04066b23e48659d961b527e9c6d43b48459d160e332/types_protobuf-5.29.1.20241207-py3-none-any.whl", hash = "sha256:92893c42083e9b718c678badc0af7a9a1307b92afe1599e5cba5f3d35b668b2f", size = 73902 }, +] + [[package]] name = "typing-extensions" version = "4.12.2"