From e3d9f441e0eb1f3023923fc10a1d34390ed66a89 Mon Sep 17 00:00:00 2001
From: Constantine
Date: Mon, 1 Jan 2024 09:01:33 -0800
Subject: [PATCH] bootstrapping (#176)

* thoth grpc service. an unfortunate reformatting event, also, too
* clean up dep declarations
* Provide action upon Thoth inception. clean up parameters/etc dep clean up for kq/ep madness, yet again
* Inline action upon Thoth inception. clean up close errors with KerlSpace
* decent amount of logging/clean up on thoth dynamic membership For the moment, just disable the rebalancing test until I can pay some much needed attention to it.
* Refactor Seal to have Any message type value() Allows Seals of arbitrary self describing protobuf messages
* Fix misspelling of proto packages - d'oh Add leyden module, new proto services for such.
* basic skeleton of Binder and Reconciliation service infra
* MVStore Finally a use for the ProtobufDatatype - lol
* Further flesh on the bone
* Further flesh on the bone
* The general idea. Flesh out the remaining comms, validation strategy, etc. Replication pattern established.
* Flesh out get/unbind API
* Woohoo! First successful replication.
* correct reconciliation logic. track digests of bound.
* General cleanup. Add README.md's button up Leyden for now
* Moar cleanup. Expose dht in proc domain
* test comms cleanup
* refactor out subdomain behaviour from top level ProcessDomain with ProcessContainerDomain
* correctly attach sources
* moar cleanup
* remove
* cache?
* process domain uses the event validation of the DHT's Ani
* properly parameterize ProcessDomain Geebus :)
* use active majority bootstrapping
* revert RingIterator bootstrap majority
* logging
* use controlled identifier directly for seed from Node
* override additional
* amp logging
* amp logging
* targeted logging
* better config. just model for now
* arg
* again?
* moar logging
* decouple context
* only unique
* add BootstrapVerifiers to provide key state resolution via delegation to successors in joined group
* fix compilation error
* use stereotomy msg, add BootstrapVerifiersTest
* Rename to Bootstrapper, add EventValidation functionality. Basically Ani for bootstrapping the view member's join
* interim cannot seem to connect to entrance server for get key state. Weirdly intermittent connect (and subsequent fail on the retrieval) if debug/single stepping is happening.
* Handle view join failure. Reseed if we cannot join the view.
* logging
* Bootstrapping Fireflies. Revert back to olden daze where the EstablishmentEvent was used in ye Note. Use this as the verifier for the Participant whose Note this is. This locks the View into the current Note of the Participant, as there's only that event as the Verifier. Validation is used in the Gateway admission, to ensure only valid member IDs join.
* Cleanup and extend SwarmTest timeout
* moar clean up
* Use non virtual threads on inbound servers
* halve swarm test for the poor build hamsters. Moar view cleanup
* Moar view stuff Spawn a ViewManagement.populate using the supplied initial seed set to help ensure population coverage and prevent successor loops with other members who just happened to join in the same view.
* The problem child
* This seems to make everything all right again. Fix/revert a lot of threading changes made that effed things up. Scheduled execs do not work well with VThreads, so always fork the running.
* Reinstitute delegated event validation and verification to Fireflies. Segregate append from read only KERL/KEL Revert back to EventCoordinates for Fireflies note.
* fix isolates * Work around the Verifiers * Minimize sig verification in replication. Re-enable sig verification in Firefly tests. Whoooo boy. * add keyState api for bootstrap verification * readd Bootstrapper * Use delegated Verifiers and EventValidation in the View * Moar cleanup wrt scheduling Also remove delegated verifier/validation as I don't believe that will be necessary --- .github/workflows/maven.yml | 9 +- README.md | 205 ++++-- .../com/salesforce/apollo/choam/CHOAM.java | 22 +- .../salesforce/apollo/choam/Committee.java | 20 +- .../apollo/choam/GenesisAssembly.java | 4 +- .../apollo/choam/GenesisContext.java | 3 +- .../salesforce/apollo/choam/Parameters.java | 64 +- .../com/salesforce/apollo/choam/Producer.java | 312 +++++---- .../com/salesforce/apollo/choam/Session.java | 67 +- .../salesforce/apollo/choam/ViewAssembly.java | 8 +- .../salesforce/apollo/choam/ViewContext.java | 47 +- .../apollo/choam/comm/Concierge.java | 15 +- .../apollo/choam/comm/Submitter.java | 5 +- .../apollo/choam/comm/Terminal.java | 2 +- .../apollo/choam/comm/TerminalClient.java | 6 +- .../apollo/choam/comm/TerminalServer.java | 21 +- .../apollo/choam/comm/TxnSubmission.java | 5 +- .../apollo/choam/comm/TxnSubmitClient.java | 20 +- .../apollo/choam/comm/TxnSubmitServer.java | 9 +- .../apollo/choam/support/Bootstrapper.java | 15 +- .../choam/support/CheckpointAssembler.java | 25 +- .../apollo/choam/support/CheckpointState.java | 4 +- .../apollo/choam/support/DigestType.java | 9 +- .../apollo/choam/support/HashedBlock.java | 70 +- .../choam/support/HashedCertifiedBlock.java | 43 +- .../apollo/choam/support/Store.java | 2 +- .../choam/support/SubmittedTransaction.java | 2 +- .../apollo/choam/support/TxDataSource.java | 46 +- .../apollo/choam/GenesisAssemblyTest.java | 4 +- .../apollo/choam/MembershipTests.java | 2 +- .../salesforce/apollo/choam/SessionTest.java | 12 +- .../salesforce/apollo/choam/TestCHOAM.java | 2 +- .../salesforce/apollo/choam/TestChain.java | 2 +- .../apollo/choam/Transactioneer.java | 2 +- .../apollo/choam/ViewAssemblyTest.java | 10 +- .../choam/support/BootstrapperTest.java | 6 +- .../support/CheckpointAssemblerTest.java | 8 +- .../choam/support/TxDataSourceTest.java | 8 +- cryptography/pom.xml | 4 - .../apollo/bloomFilters/BloomFilter.java | 4 +- .../apollo/bloomFilters/BloomWindow.java | 2 +- .../apollo/cryptography/Digest.java | 4 +- .../apollo/cryptography/DigestAlgorithm.java | 20 + .../apollo/cryptography/HexBloom.java | 40 +- .../apollo/cryptography/JohnHancock.java | 13 +- .../apollo/cryptography/QualifiedBase64.java | 6 +- .../apollo/cryptography/Verifier.java | 164 ++--- .../apollo/bloomFilters/BloomWindowTest.java | 2 +- delphinius/README.md | 91 ++- .../apollo/delphinius/AbstractOracle.java | 217 +++--- .../apollo/delphinius/Questions3Test.java | 27 +- demo/README.md | 3 - demo/pom.xml | 49 -- .../apollo/demo/DelphiResource.java | 404 ----------- .../apollo/demo/DemoApplication.java | 30 - .../apollo/demo/DemoConfiguration.java | 27 - .../apollo/demo/DemoHealthCheck.java | 22 - .../apollo/demo/ProtobufMimeProvider.java | 82 --- .../com/salesforce/apollo/demo/DemoTest.java | 48 -- demo/src/test/resources/demo-test.yaml | 10 - .../com/salesforce/apollo/ethereal/Adder.java | 126 ++-- .../com/salesforce/apollo/ethereal/Crown.java | 28 +- .../com/salesforce/apollo/ethereal/Dag.java | 214 +++--- .../apollo/ethereal/EpochProofBuilder.java | 173 +++-- .../salesforce/apollo/ethereal/Ethereal.java | 32 +- .../salesforce/apollo/ethereal/PreUnit.java | 396 ++++++----- 
.../salesforce/apollo/ethereal/Processor.java | 30 +- .../com/salesforce/apollo/ethereal/Unit.java | 245 ++++--- .../salesforce/apollo/ethereal/Waiting.java | 26 +- .../apollo/ethereal/WeakThresholdKey.java | 4 +- .../ethereal/memberships/ChRbcGossip.java | 16 +- .../ethereal/memberships/comm/Gossiper.java | 6 +- .../memberships/comm/GossiperClient.java | 12 +- .../memberships/comm/GossiperServer.java | 11 +- .../memberships/comm/GossiperService.java | 7 +- .../apollo/ethereal/EtherealTest.java | 2 +- fireflies/pom.xml | 8 +- .../apollo/fireflies/AccusationWrapper.java | 3 +- .../salesforce/apollo/fireflies/Binding.java | 286 ++++---- .../apollo/fireflies/Bootstrapper.java | 244 +++++++ .../apollo/fireflies/NoteWrapper.java | 24 +- .../apollo/fireflies/Parameters.java | 92 ++- .../com/salesforce/apollo/fireflies/View.java | 632 ++++++++++-------- .../apollo/fireflies/ViewManagement.java | 187 ++++-- .../fireflies/comm/entrance/Entrance.java | 40 +- .../comm/entrance/EntranceClient.java | 90 +-- .../comm/entrance/EntranceServer.java | 79 ++- .../comm/entrance/EntranceService.java | 13 +- .../fireflies/comm/gossip/FFService.java | 7 +- .../fireflies/comm/gossip/FfClient.java | 12 +- .../fireflies/comm/gossip/FfServer.java | 9 +- .../fireflies/comm/gossip/Fireflies.java | 6 +- .../apollo/fireflies/ChurnTest.java | 33 +- .../salesforce/apollo/fireflies/E2ETest.java | 36 +- .../salesforce/apollo/fireflies/MtlsTest.java | 26 +- .../apollo/fireflies/SwarmTest.java | 39 +- fireflies/src/test/resources/logback-test.xml | 45 +- .../gorgoneion/client/GorgoneionClient.java | 44 +- .../client/client/comm/Admissions.java | 8 +- .../client/client/comm/AdmissionsClient.java | 10 +- .../client/GorgoneionClientTest.java | 87 ++- .../apollo/gorgoneion/Gorgoneion.java | 12 +- .../apollo/gorgoneion/Parameters.java | 19 +- .../comm/admissions/AdmissionsServer.java | 11 +- .../comm/admissions/AdmissionsService.java | 9 +- .../comm/endorsement/Endorsement.java | 10 +- .../comm/endorsement/EndorsementClient.java | 14 +- .../comm/endorsement/EndorsementServer.java | 16 +- .../comm/endorsement/EndorsementService.java | 10 +- .../apollo/gorgoneion/Admissions.java | 8 +- .../apollo/gorgoneion/AdmissionsClient.java | 13 +- .../apollo/gorgoneion/GorgoneionTest.java | 14 +- grpc/README.md | 3 + grpc/src/main/proto/choam.proto | 9 +- grpc/src/main/proto/crypto.proto | 2 +- grpc/src/main/proto/demesne.proto | 60 +- grpc/src/main/proto/ethereal.proto | 88 +-- grpc/src/main/proto/fireflies.proto | 164 ++--- grpc/src/main/proto/gorgoneion.proto | 50 +- grpc/src/main/proto/leyden.proto | 63 ++ grpc/src/main/proto/messaging.proto | 35 +- grpc/src/main/proto/sql-state.proto | 102 +-- grpc/src/main/proto/stereotomy-services.proto | 2 +- grpc/src/main/proto/stereotomy.proto | 4 +- grpc/src/main/proto/test.proto | 14 +- grpc/src/main/proto/thoth.proto | 9 +- isolate-ftesting/pom.xml | 35 +- .../apollo/domain/DemesneIsolateTest.java | 6 +- isolates/pom.xml | 24 +- .../demesnes/isolate/DemesneIsolate.java | 8 +- .../apollo/demesnes/DemesneSmoke.java | 19 +- .../apollo/demesnes/FireFliesTrace.java | 45 +- leyden/README.md | 3 + leyden/pom.xml | 48 ++ .../apollo/leyden/BoundDatatype.java | 36 +- .../apollo/leyden/CombinedIntervals.java | 81 +++ .../apollo/leyden/DigestDatatype.java | 52 ++ .../salesforce/apollo/leyden/KeyInterval.java | 53 ++ .../salesforce/apollo/leyden/LeydenJar.java | 591 ++++++++++++++++ .../apollo/leyden/comm/binding/Bind.java | 84 +++ .../leyden/comm/binding/BinderClient.java | 18 + 
.../leyden/comm/binding/BinderMetrics.java | 22 + .../leyden/comm/binding/BinderServer.java | 107 +++ .../leyden/comm/binding/BinderService.java | 17 + .../leyden/comm/reconcile/Reckoning.java | 74 ++ .../comm/reconcile/ReconciliationClient.java | 15 + .../comm/reconcile/ReconciliationMetrics.java | 15 + .../comm/reconcile/ReconciliationServer.java | 85 +++ .../comm/reconcile/ReconciliationService.java | 15 + .../apollo/leyden/CombinedIntervalsTest.java | 46 ++ .../apollo/leyden/LeydenJarTest.java | 135 ++++ leyden/src/test/resources/logback-test.xml | 30 + liquibase-deterministic/README.md | 3 + memberships/pom.xml | 68 +- .../apollo/archipelago/Enclave.java | 2 +- .../apollo/archipelago/LocalServer.java | 4 +- .../archipelago/ManagedServerChannel.java | 38 +- .../apollo/archipelago/MtlsClient.java | 15 +- .../apollo/archipelago/MtlsServer.java | 4 +- .../salesforce/apollo/archipelago/Portal.java | 2 +- .../apollo/archipelago/RouterImpl.java | 27 +- .../archipelago/ServerConnectionCache.java | 472 ++++++------- .../apollo/membership/CompactContext.java | 360 +++++----- .../salesforce/apollo/membership/Context.java | 268 ++++---- .../apollo/membership/ContextImpl.java | 250 +++---- .../apollo/membership/ReservoirSampler.java | 19 +- .../messaging/rbc/ReliableBroadcaster.java | 12 +- .../messaging/rbc/comms/RbcClient.java | 12 +- .../messaging/rbc/comms/RbcServer.java | 11 +- .../rbc/comms/ReliableBroadcast.java | 7 +- .../ControlledIdentifierMember.java | 2 +- .../apollo/ring/RingCommunications.java | 32 +- .../salesforce/apollo/ring/RingIterator.java | 62 +- .../salesforce/apollo/ring/SliceIterator.java | 34 +- .../apollo/archipeligo/DemultiplexerTest.java | 23 +- .../apollo/archipeligo/EnclaveTest.java | 13 +- .../apollo/archipeligo/LocalServerTest.java | 8 +- .../apollo/archipeligo/RouterTest.java | 8 +- .../membership/messaging/rbc/RbcTest.java | 2 +- .../salesforce/apollo/ring/ServerImpl.java | 4 +- .../salesforce/apollo/ring/ServiceImpl.java | 13 +- .../salesforce/apollo/ring/TestItClient.java | 6 +- model/pom.xml | 43 +- .../com/salesforce/apollo/model/Domain.java | 158 ++--- .../apollo/model/ProcessContainerDomain.java | 245 +++++++ .../apollo/model/ProcessDomain.java | 287 ++------ .../salesforce/apollo/model/SubDomain.java | 42 +- .../apollo/model/comms/Delegation.java | 7 +- .../apollo/model/comms/DelegationClient.java | 10 +- .../apollo/model/comms/DelegationServer.java | 7 +- .../apollo/model/comms/DelegationService.java | 5 +- .../model/delphinius/ShardedOracle.java | 106 +-- .../apollo/model/demesnes/Demesne.java | 5 +- .../apollo/model/demesnes/DemesneImpl.java | 90 +-- .../apollo/model/demesnes/JniBridge.java | 25 +- .../demesnes/comm/DemesneKERLServer.java | 23 +- .../demesnes/comm/OuterContextClient.java | 9 +- .../demesnes/comm/OuterContextServer.java | 9 +- .../demesnes/comm/OuterContextService.java | 5 +- .../apollo/model/stereotomy/ShardedKERL.java | 2 +- model/src/main/resources/initialize.xml | 19 +- .../apollo/model/ContainmentDomainTest.java | 135 ++++ .../salesforce/apollo/model/DomainTest.java | 32 +- .../apollo/model/FireFliesTest.java | 55 +- .../apollo/model/StoredProceduresTest.java | 58 -- .../apollo/model/demesnes/DemesneTest.java | 21 +- .../model/stereotomy/ShardedKERLTest.java | 29 +- model/src/test/resources/logback-test.xml | 38 +- pom.xml | 42 +- protocols/README.md | 14 +- protocols/pom.xml | 24 +- .../limiter/LifoBlockingLimiterTest.java | 66 +- .../apollo/comm/grpc/DomainSocketTest.java | 6 +- .../apollo/comm/grpc/MtlsClient.java | 29 +- 
.../apollo/comm/grpc/MtlsServer.java | 153 ++--- .../salesforce/apollo/comm/grpc/TestMtls.java | 43 +- schemas/src/main/resources/model/model.xml | 28 +- .../com/salesforce/apollo/state/Emulator.java | 6 +- .../com/salesforce/apollo/state/Mutator.java | 4 +- .../apollo/state/SqlStateMachine.java | 6 +- .../apollo/state/AbstractLifecycleTest.java | 4 +- .../salesforce/apollo/state/CHOAMTest.java | 4 +- .../apollo/state/MigrationTest.java | 70 +- .../salesforce/apollo/state/MutatorTest.java | 42 +- .../salesforce/apollo/state/ScriptTest.java | 4 +- .../apollo/state/Transactioneer.java | 30 +- .../salesforce/apollo/state/UpdaterTest.java | 19 +- .../services/grpc/binder/BinderClient.java | 24 +- .../services/grpc/binder/BinderServer.java | 9 +- .../services/grpc/kerl/CommonKERLClient.java | 4 +- .../services/grpc/kerl/KERLAdapter.java | 18 +- .../services/grpc/kerl/KERLClient.java | 2 +- .../services/grpc/kerl/KERLServer.java | 18 +- .../services/grpc/observer/EventObserver.java | 13 +- .../grpc/observer/EventObserverClient.java | 28 +- .../grpc/observer/EventObserverServer.java | 12 +- .../grpc/resolver/ResolverClient.java | 24 +- .../grpc/resolver/ResolverServer.java | 7 +- .../validation/EventValidationClient.java | 28 +- .../validation/EventValidationServer.java | 5 +- .../stereotomy/services/grpc/TestBinder.java | 12 +- .../services/grpc/TestEventObserver.java | 9 +- .../services/grpc/TestEventValidation.java | 12 +- .../services/grpc/TestKerlService.java | 9 +- .../services/grpc/TestResolver.java | 4 +- .../apollo/stereotomy/DelegatedKERL.java | 16 +- .../apollo/stereotomy/DigestKERL.java | 2 +- .../apollo/stereotomy/EventCoordinates.java | 59 +- .../apollo/stereotomy/EventValidation.java | 93 +-- .../com/salesforce/apollo/stereotomy/KEL.java | 60 +- .../salesforce/apollo/stereotomy/KERL.java | 37 +- .../apollo/stereotomy/KerlVerifier.java | 50 +- .../apollo/stereotomy/KeyCoordinates.java | 30 +- .../apollo/stereotomy/KeyState.java | 18 +- .../apollo/stereotomy/KeyStateVerifier.java | 57 ++ .../apollo/stereotomy/ReadOnlyKERL.java | 59 -- .../apollo/stereotomy/Stereotomy.java | 4 +- .../apollo/stereotomy/StereotomyImpl.java | 19 +- .../apollo/stereotomy/Verifiers.java | 158 +++-- .../apollo/stereotomy/caching/CachingKEL.java | 20 +- .../stereotomy/caching/CachingKERL.java | 16 +- .../apollo/stereotomy/db/UniKERL.java | 8 +- .../stereotomy/event/AttachmentEvent.java | 43 +- .../apollo/stereotomy/event/KeyEvent.java | 22 +- ...eyStateWithEndorsementsAndValidations.java | 44 +- .../apollo/stereotomy/event/Seal.java | 199 ++++-- .../event/protobuf/AttachmentEventImpl.java | 7 +- .../protobuf/DelegatedInceptionEventImpl.java | 3 +- .../protobuf/DelegatedRotationEventImpl.java | 3 +- .../protobuf/EstablishmentEventImpl.java | 7 +- .../event/protobuf/InceptionEventImpl.java | 13 +- .../event/protobuf/InteractionEventImpl.java | 9 +- .../event/protobuf/KeyEventImpl.java | 7 +- .../event/protobuf/KeyStateImpl.java | 65 +- .../event/protobuf/ProtobufEventFactory.java | 144 ++-- .../event/protobuf/RotationEventImpl.java | 11 +- .../identifier/BasicIdentifier.java | 5 +- .../stereotomy/identifier/Identifier.java | 59 +- .../identifier/QualifiedBase64Identifier.java | 2 +- .../identifier/SelfAddressingIdentifier.java | 3 +- .../identifier/SelfSigningIdentifier.java | 3 +- .../spec/IdentifierSpecification.java | 429 ++++++------ .../spec/KeyConfigurationDigester.java | 21 +- .../spec/RotationSpecification.java | 396 +++++------ .../apollo/stereotomy/mem/MemKERL.java | 17 +- 
.../apollo/stereotomy/services/Binder.java | 22 +- .../services/proto/ProtoBinder.java | 5 +- .../services/proto/ProtoEventObserver.java | 9 +- .../services/proto/ProtoEventValidation.java | 3 +- .../services/proto/ProtoKERLAdapter.java | 6 +- .../services/proto/ProtoKERLProvider.java | 2 +- .../services/proto/ProtoKERLService.java | 2 +- .../services/proto/ProtoResolver.java | 5 +- .../apollo/stereotomy/StereotomyTests.java | 56 +- .../java/com/salesforce/apollo/thoth/Ani.java | 45 +- .../apollo/thoth/CombinedIntervals.java | 17 +- .../apollo/thoth/DirectPublisher.java | 8 +- .../com/salesforce/apollo/thoth/KerlDHT.java | 540 ++++++++------- .../salesforce/apollo/thoth/KerlSpace.java | 338 +++++----- .../salesforce/apollo/thoth/KeyInterval.java | 17 +- .../apollo/thoth/LoggingOutputStream.java | 16 +- .../com/salesforce/apollo/thoth/Maat.java | 5 +- .../salesforce/apollo/thoth/Publisher.java | 22 +- .../com/salesforce/apollo/thoth/Thoth.java | 53 +- .../apollo/thoth/grpc/ThothServer.java | 110 +++ .../thoth/grpc/delegation/Delegation.java | 5 +- .../grpc/delegation/DelegationClient.java | 1 - .../grpc/delegation/DelegationServer.java | 3 +- .../grpc/delegation/DelegationService.java | 1 - .../apollo/thoth/grpc/dht/DhtClient.java | 58 +- .../apollo/thoth/grpc/dht/DhtServer.java | 8 +- .../apollo/thoth/grpc/dht/DhtService.java | 8 +- .../grpc/reconciliation/Reconciliation.java | 7 +- .../reconciliation/ReconciliationClient.java | 51 +- .../reconciliation/ReconciliationServer.java | 10 +- .../reconciliation/ReconciliationService.java | 10 +- .../thoth/metrics/GorgoneionMetrics.java | 1 - .../apollo/thoth/metrics/KerlDhtMetrics.java | 1 - .../apollo/thoth/AbstractDhtTest.java | 32 +- .../com/salesforce/apollo/thoth/AniTest.java | 25 +- .../apollo/thoth/BootstrappingTest.java | 25 +- .../apollo/thoth/DhtRebalanceTest.java | 140 +++- .../salesforce/apollo/thoth/KerlDhtTest.java | 2 +- .../apollo/thoth/KerlSpaceTest.java | 109 +-- .../com/salesforce/apollo/thoth/KerlTest.java | 12 +- .../apollo/thoth/PublisherTest.java | 4 +- .../apollo/thoth/ThothServerTest.java | 121 ++++ .../salesforce/apollo/thoth/ThothTest.java | 8 +- thoth/src/test/resources/logback-test.xml | 32 +- 329 files changed, 9204 insertions(+), 7225 deletions(-) delete mode 100644 demo/README.md delete mode 100644 demo/pom.xml delete mode 100644 demo/src/main/java/com/salesforce/apollo/demo/DelphiResource.java delete mode 100644 demo/src/main/java/com/salesforce/apollo/demo/DemoApplication.java delete mode 100644 demo/src/main/java/com/salesforce/apollo/demo/DemoConfiguration.java delete mode 100644 demo/src/main/java/com/salesforce/apollo/demo/DemoHealthCheck.java delete mode 100644 demo/src/main/java/com/salesforce/apollo/demo/ProtobufMimeProvider.java delete mode 100644 demo/src/test/java/com/salesforce/apollo/demo/DemoTest.java delete mode 100644 demo/src/test/resources/demo-test.yaml create mode 100644 fireflies/src/main/java/com/salesforce/apollo/fireflies/Bootstrapper.java create mode 100644 grpc/README.md create mode 100644 grpc/src/main/proto/leyden.proto create mode 100644 leyden/README.md create mode 100644 leyden/pom.xml rename model/src/main/java/com/salesforce/apollo/model/ProtobufDatatype.java => leyden/src/main/java/com/salesforce/apollo/leyden/BoundDatatype.java (52%) create mode 100644 leyden/src/main/java/com/salesforce/apollo/leyden/CombinedIntervals.java create mode 100644 leyden/src/main/java/com/salesforce/apollo/leyden/DigestDatatype.java create mode 100644 
leyden/src/main/java/com/salesforce/apollo/leyden/KeyInterval.java create mode 100644 leyden/src/main/java/com/salesforce/apollo/leyden/LeydenJar.java create mode 100644 leyden/src/main/java/com/salesforce/apollo/leyden/comm/binding/Bind.java create mode 100644 leyden/src/main/java/com/salesforce/apollo/leyden/comm/binding/BinderClient.java create mode 100644 leyden/src/main/java/com/salesforce/apollo/leyden/comm/binding/BinderMetrics.java create mode 100644 leyden/src/main/java/com/salesforce/apollo/leyden/comm/binding/BinderServer.java create mode 100644 leyden/src/main/java/com/salesforce/apollo/leyden/comm/binding/BinderService.java create mode 100644 leyden/src/main/java/com/salesforce/apollo/leyden/comm/reconcile/Reckoning.java create mode 100644 leyden/src/main/java/com/salesforce/apollo/leyden/comm/reconcile/ReconciliationClient.java create mode 100644 leyden/src/main/java/com/salesforce/apollo/leyden/comm/reconcile/ReconciliationMetrics.java create mode 100644 leyden/src/main/java/com/salesforce/apollo/leyden/comm/reconcile/ReconciliationServer.java create mode 100644 leyden/src/main/java/com/salesforce/apollo/leyden/comm/reconcile/ReconciliationService.java create mode 100644 leyden/src/test/java/com/salesforce/apollo/leyden/CombinedIntervalsTest.java create mode 100644 leyden/src/test/java/com/salesforce/apollo/leyden/LeydenJarTest.java create mode 100644 leyden/src/test/resources/logback-test.xml create mode 100644 liquibase-deterministic/README.md create mode 100644 model/src/main/java/com/salesforce/apollo/model/ProcessContainerDomain.java create mode 100644 model/src/test/java/com/salesforce/apollo/model/ContainmentDomainTest.java delete mode 100644 model/src/test/java/com/salesforce/apollo/model/StoredProceduresTest.java create mode 100644 stereotomy/src/main/java/com/salesforce/apollo/stereotomy/KeyStateVerifier.java delete mode 100644 stereotomy/src/main/java/com/salesforce/apollo/stereotomy/ReadOnlyKERL.java create mode 100644 thoth/src/main/java/com/salesforce/apollo/thoth/grpc/ThothServer.java create mode 100644 thoth/src/test/java/com/salesforce/apollo/thoth/ThothServerTest.java diff --git a/.github/workflows/maven.yml b/.github/workflows/maven.yml index 6c7966b997..9fa49f0ba5 100644 --- a/.github/workflows/maven.yml +++ b/.github/workflows/maven.yml @@ -1,6 +1,6 @@ name: Java CI -on: [push] +on: [ push ] jobs: build: @@ -8,6 +8,13 @@ jobs: runs-on: ubuntu-latest steps: + - name: Cache local Maven repository + uses: actions/cache@v3 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-maven- - name: Set up Maven uses: stCarolas/setup-maven@v4.5 with: diff --git a/README.md b/README.md index 61809e7300..bc61f22575 100644 --- a/README.md +++ b/README.md @@ -1,52 +1,79 @@ # Apollo Delphinius -The Apollo Delphinius project is an experimental multi-tenant, distributed system platform. Apollo provides a secure communications overlay using Fireflies. The consensus layer is supplied by an asynchronous bft consensus protocol. The sql state interface is via a JDBC connection over replicated SQL state machines, supported by checkpointed CHOAM linear logs. Identity and key managment is provided as a foundational service and integrated into the MTLS grpc communication. 
-The target service goal is a multitenant Zanzibar/KERI integration that provides a wide area replicated, low latency service for managing identity, key management, access control and verifiable credentials such as JWT issuance and validation. +The Apollo Delphinius project is an experimental multi-tenant, distributed system platform. Apollo provides a secure +communications overlay using Fireflies. The consensus layer is supplied by an asynchronous bft consensus protocol. The +sql state interface is via a JDBC connection over replicated SQL state machines, supported by checkpointed CHOAM linear +logs. Identity and key management is provided as a foundational service and integrated into the MTLS grpc communication. + +The target service goal is a multitenant Zanzibar/KERI integration that provides a wide area replicated, low latency +service for managing identity, key management, access control and verifiable credentials such as JWT issuance and +validation. ## Build Status + ![Build Status](https://github.com/salesforce/apollo/actions/workflows/maven.yml/badge.svg) -The Java Maven CI is now integrated, and given how weak these CI daemons are, this should guarantee reproducible clean builds from the command line maven. +The Java Maven CI is now integrated, and given how weak these CI daemons are, this should guarantee reproducible clean +builds from the command line maven. ## Not A Coin Platform™ -Apollo isn't designed for coins, rather as essentially a distributed multitenant database. Of course, while the systems and mechanisms of Apollo can be used for such, the design goals are much different. Thus, no coins for you. + +Apollo isn't designed for coins, rather as essentially a distributed multitenant database. Of course, while the systems +and mechanisms of Apollo can be used for such, the design goals are much different. Thus, no coins for you. ## Some Features + * Multitenant isolation enclaves using GraalVM Isolates * Self contained cryptography module - Self describing Digests, Signatures and Identifiers, solid Bloomfilters. -* Decentralized Identifier based foundation and key managment infrastructure, based on the [Key Event Receipt Infrastructure](https://github.com/decentralized-identity/keri) (KERI) +* Decentralized Identifier based foundation and key management infrastructure, based on + the [Key Event Receipt Infrastructure](https://github.com/decentralized-identity/keri) (KERI) * Secure and trusted attestation, identity boostrapping and secrets provisioning -* MTLS network communication - KERI for MTLS certificate authentication. Local communication simulation for simplified multinode simulation for single process (IDE) testing +* MTLS network communication - KERI for MTLS certificate authentication. Local communication simulation for simplified + multinode simulation for single process (IDE) testing * Multi instance GRPC service routing - Context keyed services and routing framework -* Byzantine intrusion tolerant secure membership and communications overlay providing virtually synchronous, stable membership views. +* Byzantine intrusion tolerant secure membership and communications overlay providing virtually synchronous, stable + membership views. 
* Efficient and easy to reuse communication patterns for Fireflies ring style gossiping on membership contexts * Reliable Broadcast - garbage collected, context routed reliable broadcast * Efficient atomic broadcast in asynchronous networks with byzantine nodes * Dynamic, committee based, transaction causal ordering service producing linear logs - Replicated State Machines -* JDBC accessible, SQL store backed, materialized views maintained by a SQL state machine. Supports DDL, DML, stored procedures, functions and triggers. +* JDBC accessible, SQL store backed, materialized views maintained by a SQL state machine. Supports DDL, DML, stored + procedures, functions and triggers. * Google Zanzibar functionality providing Relation Based Access Control hosted on SQL state machines. ## Requirements + Apollo requires JDK 21+ and [Maven](https://maven.apache.org/) 3.8.1 and above ### Install Maven + See [Installing Apache Maven](https://maven.apache.org/install.html) if you need to install Maven. ### Install GraalVM (Optional) -Apollo optionally requires the GraalVM 22.3.1+ for leveraging Isolates and other fantastic features of the GraalVM. To install the GraalVM, see the [Getting Started Guide](https://www.graalvm.org/latest/docs/getting-started/). For Mac and Apple Silicon, use the [Homebrew Tap for GraalVM](https://github.com/graalvm/homebrew-tap). + +Apollo optionally requires the GraalVM 22.3.1+ for leveraging Isolates and other fantastic features of the GraalVM. To +install the GraalVM, see the [Getting Started Guide](https://www.graalvm.org/latest/docs/getting-started/). For Mac and +Apple Silicon, use the [Homebrew Tap for GraalVM](https://github.com/graalvm/homebrew-tap). ## Building Apollo -**Important**: To provide deterministic SQL execution, Apollo requires an installation step that need only be done once. If you are building Apollo for the first time, you __must__ cd to the root directory of the repository and then: + +**Important**: To provide deterministic SQL execution, Apollo requires an installation step that need only be done once. +If you are building Apollo for the first time, you __must__ cd to the root directory of the repository and then: mvn clean install -Ppre -DskipTests - -This will perform a full build, including the deterministic SQL execution module. After this is complete, you do not need to do this again. You can build Apollo normally, without the deterministic SQL module and to do so cd to the root directory of the repository and then: - + +This will perform a full build, including the deterministic SQL execution module. After this is complete, you do not +need to do this again. You can build Apollo normally, without the deterministic SQL module and to do so cd to the root +directory of the repository and then: + mvn clean install -Note that the _install_ maven goal is **required**, as this installs the modules in your local repository for use by dependent modules within the rest of the build. You must have invoked maven on the Apollo project root with the "install" goal at least once, to correctly build any arbitrary submodule. +Note that the _install_ maven goal is **required**, as this installs the modules in your local repository for use by +dependent modules within the rest of the build. You must have invoked maven on the Apollo project root with the " +install" goal at least once, to correctly build any arbitrary submodule. 
-You can, of course, use the "--also-make-dependents" argument for maven "-amd" if you want to build a particular module without performing the full build. +You can, of course, use the "--also-make-dependents" argument for maven "-amd" if you want to build a particular module +without performing the full build. ### Building Apollo Isolate Enclaves @@ -58,112 +85,188 @@ This will add the *[isolates](isolates/README.md)* modules to the build. ### Platform Specific Domain Socket Support -Platform specific code for supporting Unix Domain Socket in GRPC Netty is segregated into two different modules: *[domain-epoll](domain-epoll)* and *[domain-kqueue](domain-kqueue)*. These modules are added via platform specific profiles that are activated for the platform the build is running on. +Platform specific code for supporting Unix Domain Socket in GRPC Netty is segregated into two different modules: +*[domain-epoll](domain-epoll)* and *[domain-kqueue](domain-kqueue)*. These modules are added via platform specific +profiles that are activated for the platform the build is running on. ## Modules -Apollo is reasonably modularized mostly for the purpose of subsystem isolation and reuse. Each module is a Maven module under the source root and contains a README.md documenting (such as it is at the moment, lol) the module. + +Apollo is reasonably modularized mostly for the purpose of subsystem isolation and reuse. Each module is a Maven module +under the source root and contains a README.md documenting (such as it is at the moment, lol) the module. * [CHOAM](choam/README.md) - Committee maintanence of replicated state machines * [Delphinius](delphinius/README.md) - Bare bones Google Zanzibar clone -* [Demo](demo/README.md) - Hypothetical DropWizard REST API for Delphinus running on the Apollo stack * [Domain-EPoll](domain-epoll) - linux support for Netty domain sockets * [Domain-KQueue](domain-epoll) - mac osx support for Netty domain sockets +* [Domain-Sockets](domain-sockets) - unifying abstraction for the different OS domain sockets * [Ethereal](ethereal/README.md) - Aleph asynchronous BFT atomic broadcast (consensus block production) -* [Fireflies](fireflies/README.md) - Byzantine intrusion tolerant, virtually synchronous membership service and secure communications overlay +* [Fireflies](fireflies/README.md) - Byzantine intrusion tolerant, virtually synchronous membership service and secure + communications overlay * [Deterministic H2](h2-deterministic) - Deterministic H2 SQL Database * [Deterministic Liquibase](liquibase-deterministic) - Deterministic Liquibase * [Gorgoneion](gorgoneion/README.md) - Identity bootstrapping * [Gorgoneion Client](gorgoneion-client/README.md) - Identity bootstrap client * [Isolates](isolates/README.md) - GraalVM shared libray construction of Apollo sub domain enclaves. * [Isolate Functional Testing](isolate-ftesting/README.md) - Functional testing of Apollo domain enclaves. -* [Memberships](memberships/README.md) - Fundamental membership and Context model. Local and MTLS GRPC _Routers_. Ring communication and gossip patterns. -* [Model](model/README.md) - Replicated domains. Process and multitentant sharding domains and enclaves. +* [Memberships](memberships/README.md) - Fundamental membership and Context model. Local and MTLS GRPC _Routers_. Ring + communication and gossip patterns. +* [Model](model/README.md) - Replicated domains. Process and multitentant sharding domains and enclaves. 
* [Protocols](protocols/README.md) - GRPC MTLS service fundamentals, Netflix GRPC and other rate limiters. * [Schemas](schemas/README.md) - Liquibase SQL definitions for other modules -* [Sql-State](sql-state/README.md) - Replicated SQL state machines running on CHOAM linear logs. JDBC interface. -* [Stereotomy](stereotomy/README.md) - Key Event Receipt Infrastructure. KEL, KERL and other fundamental identity, key and trust management +* [Sql-State](sql-state/README.md) - Replicated SQL state machines running on CHOAM linear logs. JDBC interface. +* [Stereotomy](stereotomy/README.md) - Key Event Receipt Infrastructure. KEL, KERL and other fundamental identity, key + and trust management * [Stereotomy Services](stereotomy-services) - GRPC services and protobuff interfaces for KERI services -* [Thoth](thoth/README.md) - Decentralized Stereotomy. Distributed hash table storage, protocols and API for managing KERI decentralized identity +* [Thoth](thoth/README.md) - Decentralized Stereotomy. Distributed hash table storage, protocols and API for managing + KERI decentralized identity * [Tron](tron/README.md) - Compact, sophisticated Finite State Machine model using Java Enums. -* [Cryptography](cryptography/README.md) - Base cryptography primitives. Bloom filters (of several varieties). Some general utility stuff. - +* [Cryptography](cryptography/README.md) - Base cryptography primitives. Bloom filters (of several varieties). Some + general utility stuff. ## Protobuf and GRPC -Apollo uses Protobuf for all serialization and GRPC for all interprocess communication. This implies code generation. Not something I adore, but not much choice in the matter. GRPC/Proto generation also appears not to play well with the Eclipse IDE Maven integration. To aleviate this, _all_ grpc/proto generation occurs in one module, the aptly named _grpc_ module. + +Apollo uses Protobuf for all serialization and GRPC for all interprocess communication. This implies code generation. +Not something I adore, but not much choice in the matter. GRPC/Proto generation also appears not to play well with the +Eclipse IDE Maven integration. To alleviate this, _all_ grpc/proto generation occurs in one module, the aptly named +_grpc_ module. ## JOOQ -Apollo makes use of [JOOQ](https://www.jooq.org) as a SQL DSL for Java. This also implies code generation and, again, not something I adore. Unlike GRPC, the JOOQ code generation plays very nicely with the Eclipse IDE's Maven integration, so JOOQ code generation is included in the module that defines it. + +Apollo makes use of [JOOQ](https://www.jooq.org) as a SQL DSL for Java. This also implies code generation and, again, +not something I adore. Unlike GRPC, the JOOQ code generation plays very nicely with the Eclipse IDE's Maven integration, +so JOOQ code generation is included in the module that defines it. ## WIP -Note that Apollo Delphinius is very much a _work_ _in_ _progress_ . There is not yet an official release. Thus, it is by no means a full featured, hardened distributed systems platform. + +Note that Apollo Delphinius is very much a _work_ _in_ _progress_ . There is not yet an official release. Thus, it +is by no means a full featured, hardened distributed systems platform. ## Requirements -Apollo is a pure Java application The build system uses Maven, and requires Maven 3.8.1+. The Maven enforcer plugin enforces dependency convergance and Apollo is built using Java 17. 
-Apollo is a [multi module Maven project](https://maven.apache.org/guides/mini/guide-multiple-modules.html). This means that the various modules of Apollo are built and versioned as a whole, rather than being separated out into individual repositories. This also means that modules refer to other modules within the project as dependencies, and consequently must be built in the correct order. Note that Maven does this by default, so there should be no issues. However, it does mean that one can't simply cd into a module and build it without building its dependencies first. If you feel you must do so, please make sure to include the "install" goal and please make sure you add the "--also-make-dependents" or "--amd" parameter to your maven invocation. +Apollo is a pure Java application. The build system uses Maven, and requires Maven 3.8.1+. The Maven enforcer plugin +enforces dependency convergence and Apollo is built using Java 17. + +Apollo is a [multi module Maven project](https://maven.apache.org/guides/mini/guide-multiple-modules.html). This means +that the various modules of Apollo are built and versioned as a whole, rather than being separated out into individual +repositories. This also means that modules refer to other modules within the project as dependencies, and consequently +must be built in the correct order. Note that Maven does this by default, so there should be no issues. However, it does +mean that one can't simply cd into a module and build it without building its dependencies first. If you feel you must +do so, please make sure to include the "install" goal and please make sure you add the "--also-make-dependents" or " +--amd" parameter to your maven invocation. ## Code Generation In Apollo -Apollo requires code generation as part of the build. This is performed in the Maven "generate-sources" phase of the build. Consequently, this build phase *must* be run at least once in order to generate the java sources required by the rest of the build. -The current code generators used in Apollo are GRPC/Proto and JOOQ. GRPC is for the various serializable forms and network protocols used by Apollo. The JOOQ code generation is for the JOOQ SQL functionality. +Apollo requires code generation as part of the build. This is performed in the Maven "generate-sources" phase of the +build. Consequently, this build phase *must* be run at least once in order to generate the java sources required by the +rest of the build. -GRPC/Protoc code generation only occurs in the _grpc_ module and is output into the _grpc/target/generated-sources_ directory. For GRPC/Proto, there are 2 directory roots: _grpc/target/generated-sources/protobuf/grpc-java_ and _grpc/target/generated-sources/protobuf/java_ . For JOOQ, the root directory is _(module dir)/target/generated-sources/jooq_ . +The current code generators used in Apollo are GRPC/Proto and JOOQ. GRPC is for the various serializable forms and +network protocols used by Apollo. The JOOQ code generation is for the JOOQ SQL functionality. -Again, I stress that because these generated source directories are under the "(module dir)/target" directory, they are removed during the "clean" phase of Maven and consequently must be regenerated in order to compile the rest of the build. +GRPC/Protoc code generation only occurs in the _grpc_ module and is output into the _grpc/target/generated-sources_ +directory. For GRPC/Proto, there are 2 directory roots: _grpc/target/generated-sources/protobuf/grpc-java_ and +_grpc/target/generated-sources/protobuf/java_ . 
For JOOQ, the root directory is _(module dir) +/target/generated-sources/jooq_ . -Note that adding these generated source directories to the compile path is automatically taken care of in the Maven *pom.xml* in the "build-helper" plugin. +Again, I stress that because these generated source directories are under the "(module dir)/target" directory, they are +removed during the "clean" phase of Maven and consequently must be regenerated in order to compile the rest of the +build. + +Note that adding these generated source directories to the compile path is automatically taken care of in the Maven +*pom.xml* in the "build-helper" plugin. ## IDE Integration + **This is Important!** -Apollo contains one module that create a shaded version of standard libraries. This module **must** be built (installed), but only needs to be built once in order to install the resulting jar into your local maven repository. This is performed as part of the top level pom's _pre_ profile. As mentioned previously, this profile must be executed at least once before the full build. Note, however, Eclipse and IntellJ **does not understand this transformation** and thus will not be able to import this module without errors and messing up the rest of the code that depends on the transformation. What this means is that the IDE thinks the module is fine and doesn't notice there has been package rewriting to avoid conflicts with existing libraries. What this means is that you *must* exclude this module in your IDE environment. This module will not be imported unless you explicitly do so, so please do not do so. If you really think you need to be working on it, then you probably understand all this. But if you are simply trying to get Apollo into your IDE, importing these module is gonna ruin your day. +Apollo contains one module that creates a shaded version of standard libraries. This module **must** be built ( +installed), but only needs to be built once in order to install the resulting jar into your local maven repository. This +is performed as part of the top level pom's _pre_ profile. As mentioned previously, this profile must be executed at +least once before the full build. Note, however, Eclipse and IntelliJ **do not understand this transformation** and +thus will not be able to import this module without errors and messing up the rest of the code that depends on the +transformation. What this means is that the IDE thinks the module is fine and doesn't notice there has been package +rewriting to avoid conflicts with existing libraries. What this means is that you *must* exclude this module in your IDE +environment. This module will not be imported unless you explicitly do so, so please do not do so. If you really think +you need to be working on it, then you probably understand all this. But if you are simply trying to get Apollo into +your IDE, importing this module is gonna ruin your day. ### Module to exclude The module to exclude is: - * h2-deterministic +* h2-deterministic -Again, I stress that you must **NOT** include this in the import of Apollo into your IDE. You'll be scratching your head and yelling at me about uncompilable code and I will simply, calmly point you to this part of the readme file. +Again, I stress that you must **NOT** include this in the import of Apollo into your IDE. You'll be scratching your head +and yelling at me about uncompilable code and I will simply, calmly point you to this part of the readme file. 
This module must be built so please run the following once from the top level of the repository mvn clean install -Ppre -DskipTests -from the command line before attempting to load the remaining Apollo modules into your IDE. Again, this only need be done once as this will be installed in your local Maven repository and you won't have to do it again. Rebuilding this module will have no adverse effect on the rest of the build. +from the command line before attempting to load the remaining Apollo modules into your IDE. Again, this only need be +done once as this will be installed in your local Maven repository and you won't have to do it again. Rebuilding this +module will have no adverse effect on the rest of the build. ### Eclipse M2E issues with ${os.detected.classifier} -This is a known weirdness with Eclipse M2E with the [os-maven-plugin build extension](https://github.com/trustin/os-maven-plugin). I've been fine with this, but ran into another project that Eclipse just kept refusing to resolve. I solved this by downloading the [supplied maven plugin](https://repo1.maven.org/maven2/kr/motd/maven/os-maven-plugin/1.7.0/os-maven-plugin-1.7.0.jar) and adding this to the **/dropins** directory. This works because the plugin is also an Eclipse plugin, which is nice. +This is a known weirdness with Eclipse M2E with +the [os-maven-plugin build extension](https://github.com/trustin/os-maven-plugin). I've been fine with this, but ran +into another project that Eclipse just kept refusing to resolve. I solved this by downloading +the [supplied maven plugin](https://repo1.maven.org/maven2/kr/motd/maven/os-maven-plugin/1.7.0/os-maven-plugin-1.7.0.jar) +and adding this to the **/dropins** directory. This works because the plugin is also an Eclipse plugin, +which is nice. ### Your IDE and Maven code generation -Due to the code generation requirements (really, I can't do jack about them, so complaining is silly), the generation phase can occasionally cause interesting issues with your IDE whne you import Apollo. I work with Eclipse, and things are relatively fine with the current releases. However, there are sometimes synchronization issues in Eclipse Maven integration that invalidates the generated code and that may require an additional *generate-sources* pass. Apollo is a multi-module project and be sure you're leaving time for the asynchronous build process to complete. +Due to the code generation requirements (really, I can't do jack about them, so complaining is silly), the generation +phase can occasionally cause interesting issues with your IDE when you import Apollo. I work with Eclipse, and things +are relatively fine with the current releases. However, there are sometimes synchronization issues in Eclipse Maven +integration that invalidate the generated code and that may require an additional *generate-sources* pass. Apollo is a +multi-module project and be sure you're leaving time for the asynchronous build process to complete. I have no idea about IntellJ or Visual Code, so you're on your own there. -What I _strongly_ recommend is first building from the command line with **-DskipTests** - i.e **mvn clean install -DskipTests**. This will ensure all dependencies are downloaded and all the code generation is complete. Further, if you haven't updated from this repo in a while, don't try to be clever. Delete all the modules from this project from your ide, build/test from the command line and _then_ reimport things. Don't ask for trouble, I always say. 
+What I _strongly_ recommend is first building from the command line with **-DskipTests** - i.e **mvn clean install +-DskipTests**. This will ensure all dependencies are downloaded and all the code generation is complete. Further, if you +haven't updated from this repo in a while, don't try to be clever. Delete all the modules from this project from your +ide, build/test from the command line and _then_ reimport things. Don't ask for trouble, I always say. -After you do this, you shouldn't have any issue *if* your IDE Maven integration knows about and takes care of using the build-helper plugin to manage compilation directories for the module in the IDE. However.... +After you do this, you shouldn't have any issue *if* your IDE Maven integration knows about and takes care of using the +build-helper plugin to manage compilation directories for the module in the IDE. However.... -Myself, I find that I have to first select the top level Apollo.app module, and then **Menu -> Run As -> Maven generate sources** (or the equivalent in your IDE). This *should* generate all the sources required for every submodule, so... +Myself, I find that I have to first select the top level Apollo.app module, and then **Menu -> Run As -> Maven generate +sources** (or the equivalent in your IDE). This *should* generate all the sources required for every submodule, so... -Feel free to generate issues and such and I will look into it as I do want this to be flawless and a good experience. I know that's impossible, but it undoubtedly can be made better, and PRs are of course a thing. +Feel free to generate issues and such and I will look into it as I do want this to be flawless and a good experience. I +know that's impossible, but it undoubtedly can be made better, and PRs are of course a thing. -Note that also, for inexplicable reasons, Eclipse Maven will determine it needs to invalidate the _grpc_ generated code and will thus need to be regenerated. I'm trying to figure out the heck is going on, but when this happens please simply regenerate by selecting the _grpc_ module and performing: Menu -> Run As -> Maven generate sources (or the equivalent in your IDE). +Note that also, for inexplicable reasons, Eclipse Maven will determine it needs to invalidate the _grpc_ generated code +and will thus need to be regenerated. I'm trying to figure out the heck is going on, but when this happens please simply +regenerate by selecting the _grpc_ module and performing: Menu -> Run As -> Maven generate sources (or the equivalent in +your IDE). ## Metrics + Apollo uses Dropwizard Metrics and these are available for Fireflies, Reliable Broadcast, Ethereal and CHOAM. ## Testing -By default, the build uses a reduced number of simulated clients for testing. To enable the larger test suite, use the system property "large_tests". For example + +By default, the build uses a reduced number of simulated clients for testing. To enable the larger test suite, use the +system property "large_tests". For example mvn clean install -Dlarge_tests=true -This requires a decent amount of resources, using two orders of magnitude more simulated clients in the tests, with longer serial transaction chains per transactioneer client. This runs fine on my Apple M1max, but this is a beefy machine. YMMV. +This requires a decent amount of resources, using two orders of magnitude more simulated clients in the tests, with +longer serial transaction chains per transactioneer client. This runs fine on my Apple M1max, but this is a beefy +machine. YMMV. 
## Current Status -Currently, the system is in devlopment. Fundamental identity and digest/signature/pubKey encodings has been integrated. Apollo is using Aleph-BFT for consensus, in the form of the Ethereal module. CHOAM has now replaced Consortium, and the SQL replicated state machine now uses CHOAM for it's linear log and transaction model. -Multitenant shards is in place and being worked upon currently. This integrates Stereotomy and Delphinius using CHOAM. An E2E test of the ReBAC Delphinius service is in development being tested. Full integration of ProcessDomains using Fireflies discovery is in development. +Currently, the system is in devlopment. Fundamental identity and digest/signature/pubKey encodings has been integrated. +Apollo is using Aleph-BFT for consensus, in the form of the Ethereal module. CHOAM has now replaced Consortium, and the +SQL replicated state machine now uses CHOAM for it's linear log and transaction model. + +Multitenant shards is in place and being worked upon currently. This integrates Stereotomy and Delphinius using CHOAM. +An E2E test of the ReBAC Delphinius service is in development being tested. Full integration of ProcessDomains using +Fireflies discovery is in development. diff --git a/choam/src/main/java/com/salesforce/apollo/choam/CHOAM.java b/choam/src/main/java/com/salesforce/apollo/choam/CHOAM.java index acf74fec54..d004baddf7 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/CHOAM.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/CHOAM.java @@ -11,20 +11,19 @@ import com.google.protobuf.ByteString; import com.google.protobuf.InvalidProtocolBufferException; import com.google.protobuf.Message; -import com.salesfoce.apollo.choam.proto.*; -import com.salesfoce.apollo.choam.proto.SubmitResult.Result; -import com.salesfoce.apollo.cryptography.proto.PubKey; -import com.salesfoce.apollo.messaging.proto.AgedMessageOrBuilder; import com.salesforce.apollo.archipelago.RouterImpl.CommonCommunications; import com.salesforce.apollo.bloomFilters.BloomFilter; import com.salesforce.apollo.choam.comm.*; import com.salesforce.apollo.choam.fsm.Combine; import com.salesforce.apollo.choam.fsm.Combine.Merchantile; +import com.salesforce.apollo.choam.proto.*; +import com.salesforce.apollo.choam.proto.SubmitResult.Result; import com.salesforce.apollo.choam.support.*; import com.salesforce.apollo.choam.support.Bootstrapper.SynchronizedState; import com.salesforce.apollo.choam.support.HashedCertifiedBlock.NullBlock; import com.salesforce.apollo.cryptography.*; import com.salesforce.apollo.cryptography.Signer.SignerImpl; +import com.salesforce.apollo.cryptography.proto.PubKey; import com.salesforce.apollo.membership.Context; import com.salesforce.apollo.membership.GroupIterator; import com.salesforce.apollo.membership.Member; @@ -32,6 +31,7 @@ import com.salesforce.apollo.membership.messaging.rbc.ReliableBroadcaster; import com.salesforce.apollo.membership.messaging.rbc.ReliableBroadcaster.MessageAdapter; import com.salesforce.apollo.membership.messaging.rbc.ReliableBroadcaster.Msg; +import com.salesforce.apollo.messaging.proto.AgedMessageOrBuilder; import com.salesforce.apollo.utils.Utils; import io.grpc.StatusRuntimeException; import org.h2.mvstore.MVMap; @@ -75,9 +75,9 @@ public class CHOAM { private final AtomicReference genesis = new AtomicReference<>(); private final AtomicReference head = new AtomicReference<>(); private final ExecutorService linear; - private final AtomicReference next = new AtomicReference<>(); - private final AtomicReference 
nextViewId = new AtomicReference<>(); - private final Parameters params; + private final AtomicReference next = new AtomicReference<>(); + private final AtomicReference nextViewId = new AtomicReference<>(); + private final Parameters params; private final PriorityBlockingQueue pending = new PriorityBlockingQueue<>(); private final RoundScheduler roundScheduler; private final Session session; @@ -587,6 +587,10 @@ private void genesisInitialization(final HashedBlock h, final List } } + private String getLabel() { + return "CHOAM" + params.member().getId() + params.context().getId(); + } + private boolean isNext(HashedBlock next) { if (next == null) { return false; @@ -938,10 +942,6 @@ private void synchronizedProcess(CertifiedBlock certifiedBlock) { pending.add(hcb); } - private String getLabel() { - return "CHOAM" + params.member().getId() + params.context().getId(); - } - public interface BlockProducer { Block checkpoint(); diff --git a/choam/src/main/java/com/salesforce/apollo/choam/Committee.java b/choam/src/main/java/com/salesforce/apollo/choam/Committee.java index 9043b554db..7b6b67ebe9 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/Committee.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/Committee.java @@ -15,12 +15,12 @@ import org.slf4j.Logger; -import com.salesfoce.apollo.choam.proto.Certification; -import com.salesfoce.apollo.choam.proto.Reconfigure; -import com.salesfoce.apollo.choam.proto.SubmitResult; -import com.salesfoce.apollo.choam.proto.SubmitResult.Result; -import com.salesfoce.apollo.choam.proto.Transaction; -import com.salesfoce.apollo.choam.proto.ViewMember; +import com.salesforce.apollo.choam.proto.Certification; +import com.salesforce.apollo.choam.proto.Reconfigure; +import com.salesforce.apollo.choam.proto.SubmitResult; +import com.salesforce.apollo.choam.proto.SubmitResult.Result; +import com.salesforce.apollo.choam.proto.Transaction; +import com.salesforce.apollo.choam.proto.ViewMember; import com.salesforce.apollo.choam.support.HashedCertifiedBlock; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; @@ -33,7 +33,6 @@ /** * @author hal.hildebrand - * */ public interface Committee { @@ -41,13 +40,12 @@ static Map validatorsOf(Reconfigure reconfigure, Context context.getMember(new Digest(e.getMember().getId())), - e -> new DefaultVerifier(publicKey(e.getMember() - .getConsensusKey())))); + e -> new DefaultVerifier( + publicKey(e.getMember().getConsensusKey())))); } /** - * Create a view based on the cut of the supplied hash across the rings of the - * base context + * Create a view based on the cut of the supplied hash across the rings of the base context */ static Context viewFor(Digest hash, Context baseContext) { Context newView = new ContextImpl<>(hash, baseContext.getRingCount(), diff --git a/choam/src/main/java/com/salesforce/apollo/choam/GenesisAssembly.java b/choam/src/main/java/com/salesforce/apollo/choam/GenesisAssembly.java index 247a364223..85b46c08c3 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/GenesisAssembly.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/GenesisAssembly.java @@ -9,8 +9,8 @@ import com.chiralbehaviors.tron.Fsm; import com.google.protobuf.ByteString; import com.google.protobuf.InvalidProtocolBufferException; -import com.salesfoce.apollo.choam.proto.*; -import com.salesfoce.apollo.cryptography.proto.PubKey; +import com.salesforce.apollo.choam.proto.*; +import com.salesforce.apollo.cryptography.proto.PubKey; import 
com.salesforce.apollo.archipelago.RouterImpl.CommonCommunications; import com.salesforce.apollo.choam.comm.Terminal; import com.salesforce.apollo.choam.fsm.Genesis; diff --git a/choam/src/main/java/com/salesforce/apollo/choam/GenesisContext.java b/choam/src/main/java/com/salesforce/apollo/choam/GenesisContext.java index d49e7efe91..8d7f4c801a 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/GenesisContext.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/GenesisContext.java @@ -8,7 +8,7 @@ import java.util.Collections; -import com.salesfoce.apollo.choam.proto.Validate; +import com.salesforce.apollo.choam.proto.Validate; import com.salesforce.apollo.choam.CHOAM.BlockProducer; import com.salesforce.apollo.cryptography.Signer; import com.salesforce.apollo.cryptography.Verifier; @@ -17,7 +17,6 @@ /** * @author hal.hildebrand - * */ public class GenesisContext extends ViewContext { diff --git a/choam/src/main/java/com/salesforce/apollo/choam/Parameters.java b/choam/src/main/java/com/salesforce/apollo/choam/Parameters.java index 49f3535233..1cc21136f2 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/Parameters.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/Parameters.java @@ -11,10 +11,10 @@ import com.netflix.concurrency.limits.limit.AIMDLimit; import com.netflix.concurrency.limits.limiter.LifoBlockingLimiter; import com.netflix.concurrency.limits.limiter.SimpleLimiter; -import com.salesfoce.apollo.choam.proto.FoundationSeal; -import com.salesfoce.apollo.choam.proto.Join; -import com.salesfoce.apollo.choam.proto.Transaction; -import com.salesfoce.apollo.stereotomy.event.proto.KERL_; +import com.salesforce.apollo.choam.proto.FoundationSeal; +import com.salesforce.apollo.choam.proto.Join; +import com.salesforce.apollo.choam.proto.Transaction; +import com.salesforce.apollo.stereotomy.event.proto.KERL_; import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.choam.CHOAM.TransactionExecutor; import com.salesforce.apollo.choam.support.CheckpointState; @@ -60,32 +60,36 @@ public static Builder newBuilder() { return new Builder(); } - public int majority() { - return runtime.context.majority(); + public Function checkpointer() { + return runtime.checkpointer; } - public SigningMember member() { - return runtime.member; + public Router communications() { + return runtime.communications; } public Context context() { return runtime.context; } - public Router communications() { - return runtime.communications; + public Function, List> genesisData() { + return runtime.genesisData; } - public ChoamMetrics metrics() { - return runtime.metrics; + public Supplier kerl() { + return runtime.kerl; } - public Function checkpointer() { - return runtime.checkpointer; + public int majority() { + return runtime.context.majority(); } - public Function, List> genesisData() { - return runtime.genesisData; + public SigningMember member() { + return runtime.member; + } + + public ChoamMetrics metrics() { + return runtime.metrics; } public TransactionExecutor processor() { @@ -96,10 +100,6 @@ public BiConsumer restorer() { return runtime.restorer; } - public Supplier kerl() { - return runtime.kerl; - } - public static class MvStoreBuilder implements Cloneable { private int autoCommitBufferSize = -1; private int autoCompactFillRate = -1; @@ -580,10 +580,6 @@ public Limiter build(String name, MetricRegistry metrics) { .build(); } - public Duration getbacklogDuration() { - return backlogDuration; - } - public int getBacklogSize() { return backlogSize; } @@ 
-638,6 +634,10 @@ public LimiterBuilder setTimeout(Duration timeout) { return this; } + public Duration getbacklogDuration() { + return backlogDuration; + } + public LimiterBuilder setBacklogDuration(Duration backlogDuration) { this.backlogDuration = backlogDuration; return this; @@ -679,14 +679,6 @@ public static class Builder implements Cloneable { private SignatureAlgorithm viewSigAlgorithm = SignatureAlgorithm.DEFAULT; private int crowns = 2; - public int getCrowns() { - return crowns; - } - - public void setCrowns(int crowns) { - this.crowns = crowns; - } - public Parameters build(RuntimeParameters runtime) { return new Parameters(runtime, combine, gossipDuration, maxCheckpointSegments, submitTimeout, genesisViewId, checkpointBlockDelta, crowns, digestAlgorithm, viewSigAlgorithm, @@ -739,6 +731,14 @@ public Builder setCombine(ReliableBroadcaster.Parameters combine) { return this; } + public int getCrowns() { + return crowns; + } + + public void setCrowns(int crowns) { + this.crowns = crowns; + } + public DigestAlgorithm getDigestAlgorithm() { return digestAlgorithm; } diff --git a/choam/src/main/java/com/salesforce/apollo/choam/Producer.java b/choam/src/main/java/com/salesforce/apollo/choam/Producer.java index cf5805d8ac..422894277a 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/Producer.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/Producer.java @@ -23,16 +23,16 @@ import com.chiralbehaviors.tron.Fsm; import com.google.protobuf.InvalidProtocolBufferException; -import com.salesfoce.apollo.choam.proto.Assemble; -import com.salesfoce.apollo.choam.proto.Block; -import com.salesfoce.apollo.choam.proto.CertifiedBlock; -import com.salesfoce.apollo.choam.proto.Executions; -import com.salesfoce.apollo.choam.proto.Reassemble; -import com.salesfoce.apollo.choam.proto.SubmitResult; -import com.salesfoce.apollo.choam.proto.SubmitResult.Result; -import com.salesfoce.apollo.choam.proto.Transaction; -import com.salesfoce.apollo.choam.proto.UnitData; -import com.salesfoce.apollo.choam.proto.Validate; +import com.salesforce.apollo.choam.proto.Assemble; +import com.salesforce.apollo.choam.proto.Block; +import com.salesforce.apollo.choam.proto.CertifiedBlock; +import com.salesforce.apollo.choam.proto.Executions; +import com.salesforce.apollo.choam.proto.Reassemble; +import com.salesforce.apollo.choam.proto.SubmitResult; +import com.salesforce.apollo.choam.proto.SubmitResult.Result; +import com.salesforce.apollo.choam.proto.Transaction; +import com.salesforce.apollo.choam.proto.UnitData; +import com.salesforce.apollo.choam.proto.Validate; import com.salesforce.apollo.archipelago.RouterImpl.CommonCommunications; import com.salesforce.apollo.choam.comm.Terminal; import com.salesforce.apollo.choam.fsm.Driven; @@ -50,140 +50,29 @@ /** * An "Earner" - * - * @author hal.hildebrand * + * @author hal.hildebrand */ public class Producer { - record PendingBlock(HashedBlock block, Map witnesses, AtomicBoolean published) {} - - /** Leaf action Driven coupling for the Earner FSM */ - private class DriveIn implements Driven { - - @Override - public void assembled() { - final var slate = assembly.get().getSlate(); - var reconfiguration = new HashedBlock(params().digestAlgorithm(), - view.reconfigure(slate, nextViewId, previousBlock.get(), - checkpoint.get())); - var validation = view.generateValidation(reconfiguration); - final var p = new PendingBlock(reconfiguration, new HashMap<>(), new AtomicBoolean()); - pending.put(reconfiguration.hash, p); - p.witnesses.put(params().member(), 
validation); - ds.offer(validation); - log.info("Reconfiguration block: {} height: {} produced on: {}", reconfiguration.hash, - reconfiguration.height(), params().member().getId()); - } - - @Override - public void checkAssembly() { - ds.drain(); - final var dropped = ds.getRemainingTransactions(); - if (dropped != 0) { - log.warn("Dropped txns: {} on: {}", dropped, params().member().getId()); - } - final var viewAssembly = assembly.get(); - if (viewAssembly == null) { - log.warn("Assemble block never processed on: {}", params().member().getId()); - return; - } - viewAssembly.finalElection(); - if (assembled.get()) { - assembled(); - } - } - - @Override - public void checkpoint() { - log.info("Generating checkpoint block on: {}", params().member().getId()); - Block ckpt = view.checkpoint(); - if (ckpt == null) { - log.error("Cannot generate checkpoint block on: {}", params().member().getId()); - transitions.failed(); - return; - } - var next = new HashedBlock(params().digestAlgorithm(), ckpt); - previousBlock.set(next); - checkpoint.set(next); - var validation = view.generateValidation(next); - ds.offer(validation); - final var p = new PendingBlock(next, new HashMap<>(), new AtomicBoolean()); - pending.put(next.hash, p); - p.witnesses.put(params().member(), validation); - log.info("Produced checkpoint: {} height: {} for: {} on: {}", next.hash, next.height(), getViewId(), - params().member().getId()); - transitions.checkpointed(); - } - - @Override - public void complete() { - stop(); - } - - @Override - public void create(List preblock, boolean last) { - Producer.this.create(preblock, last); - } - - @Override - public void fail() { - stop(); - } - - @Override - public void produceAssemble() { - Producer.this.produceAssemble(); - } - - @Override - public void reconfigure() { - log.debug("Starting view reconfiguration for: {} on: {}", nextViewId, params().member().getId()); - assembly.set(new ViewAssembly(nextViewId, view, r -> addReassemble(r), comms) { - @Override - public void complete() { - super.complete(); - log.debug("View reconfiguration: {} gathered: {} complete on: {}", nextViewId, getSlate().size(), - params().member().getId()); - assembled.set(true); - Producer.this.transitions.viewComplete(); - } - }); - assembly.get().start(); - assembly.get().assembled(); - List reasses = new ArrayList<>(); - pendingReassembles.drainTo(reasses); - assembly.get().inbound().accept(reasses); - } - - @Override - public void startProduction() { - log.debug("Starting production for: {} on: {}", getViewId(), params().member().getId()); - controller.start(); - coordinator.start(params().producer().gossipDuration()); - } - } - private static final Logger log = LoggerFactory.getLogger(Producer.class); - - private final AtomicBoolean assembled = new AtomicBoolean(); - private final AtomicReference assembly = new AtomicReference<>(); - private final AtomicReference checkpoint = new AtomicReference<>(); - private final CommonCommunications comms; - private final Ethereal controller; - private final ChRbcGossip coordinator; - private final TxDataSource ds; - private final int lastEpoch; - private final Set nextAssembly = new HashSet<>(); - private volatile Digest nextViewId; - private final Map pending = new ConcurrentHashMap<>(); - private final BlockingQueue pendingReassembles = new LinkedBlockingQueue<>(); - private final AtomicReference previousBlock = new AtomicReference<>(); - private final AtomicBoolean started = new AtomicBoolean(false); - private final Transitions transitions; - private final 
ViewContext view; - - public Producer( ViewContext view, HashedBlock lastBlock, HashedBlock checkpoint, + private final AtomicBoolean assembled = new AtomicBoolean(); + private final AtomicReference assembly = new AtomicReference<>(); + private final AtomicReference checkpoint = new AtomicReference<>(); + private final CommonCommunications comms; + private final Ethereal controller; + private final ChRbcGossip coordinator; + private final TxDataSource ds; + private final int lastEpoch; + private final Set nextAssembly = new HashSet<>(); + private final Map pending = new ConcurrentHashMap<>(); + private final BlockingQueue pendingReassembles = new LinkedBlockingQueue<>(); + private final AtomicReference previousBlock = new AtomicReference<>(); + private final AtomicBoolean started = new AtomicBoolean(false); + private final Transitions transitions; + private final ViewContext view; + private volatile Digest nextViewId; + public Producer(ViewContext view, HashedBlock lastBlock, HashedBlock checkpoint, CommonCommunications comms, String label) { assert view != null; this.view = view; @@ -248,7 +137,7 @@ public void start() { } final Block prev = previousBlock.get().block; if (prev.hasReconfigure() && prev.getReconfigure().getCheckpointTarget() == 0) { // genesis block won't ever be - // 0 + // 0 transitions.checkpoint(); } else { transitions.start(); @@ -323,11 +212,11 @@ private void create(List preblock, boolean last) { final var txns = aggregate.stream().flatMap(e -> e.getTransactionsList().stream()).toList(); if (!txns.isEmpty()) { - log.trace("transactions: {} comb hash: {} height: {} on: {}", txns.size(), - txns.stream() - .map(t -> CHOAM.hashOf(t, params().digestAlgorithm())) - .reduce((a, b) -> a.xor(b)) - .orElse(null), + log.trace("transactions: {} comb hash: {} height: {} on: {}", txns.size(), txns.stream() + .map(t -> CHOAM.hashOf(t, + params().digestAlgorithm())) + .reduce((a, b) -> a.xor(b)) + .orElse(null), lb.height().add(1), params().member().getId()); var builder = Executions.newBuilder(); txns.forEach(e -> builder.addExecutions(e)); @@ -367,12 +256,12 @@ private void produceAssemble() { final var vlb = previousBlock.get(); nextViewId = vlb.hash; nextAssembly.addAll(Committee.viewMembersOf(nextViewId, params().context())); - final var assemble = new HashedBlock(params().digestAlgorithm(), - view.produce(vlb.height().add(1), vlb.hash, - Assemble.newBuilder() - .setNextView(vlb.hash.toDigeste()) - .build(), - checkpoint.get())); + final var assemble = new HashedBlock(params().digestAlgorithm(), view.produce(vlb.height().add(1), vlb.hash, + Assemble.newBuilder() + .setNextView( + vlb.hash.toDigeste()) + .build(), + checkpoint.get())); previousBlock.set(assemble); final var validation = view.generateValidation(assemble); final var p = new PendingBlock(assemble, new HashMap<>(), new AtomicBoolean()); @@ -384,18 +273,16 @@ private void produceAssemble() { } private void publish(PendingBlock p) { -// assert previousBlock.get().hash.equals(Digest.from(p.block.block.getHeader().getPrevious())) : "Pending block: " -// + p.block.hash + " previous: " + Digest.from(p.block.block.getHeader().getPrevious()) + " is not: " -// + previousBlock.get().hash; + // assert previousBlock.get().hash.equals(Digest.from(p.block.block.getHeader().getPrevious())) : "Pending block: " + // + p.block.hash + " previous: " + Digest.from(p.block.block.getHeader().getPrevious()) + " is not: " + // + previousBlock.get().hash; log.debug("Published pending: {} height: {} on: {}", p.block.hash, p.block.height(), 
params().member().getId()); p.published.set(true); pending.remove(p.block.hash); final var cb = CertifiedBlock.newBuilder() .setBlock(p.block.block) - .addAllCertifications(p.witnesses.values() - .stream() - .map(v -> v.getWitness()) - .toList()) + .addAllCertifications( + p.witnesses.values().stream().map(v -> v.getWitness()).toList()) .build(); view.publish(new HashedCertifiedBlock(params().digestAlgorithm(), cb)); } @@ -413,4 +300,113 @@ private PendingBlock validate(Validate v) { p.witnesses.put(view.context().getMember(Digest.from(v.getWitness().getId())), v); return p; } + + record PendingBlock(HashedBlock block, Map witnesses, AtomicBoolean published) { + } + + /** Leaf action Driven coupling for the Earner FSM */ + private class DriveIn implements Driven { + + @Override + public void assembled() { + final var slate = assembly.get().getSlate(); + var reconfiguration = new HashedBlock(params().digestAlgorithm(), + view.reconfigure(slate, nextViewId, previousBlock.get(), + checkpoint.get())); + var validation = view.generateValidation(reconfiguration); + final var p = new PendingBlock(reconfiguration, new HashMap<>(), new AtomicBoolean()); + pending.put(reconfiguration.hash, p); + p.witnesses.put(params().member(), validation); + ds.offer(validation); + log.info("Reconfiguration block: {} height: {} produced on: {}", reconfiguration.hash, + reconfiguration.height(), params().member().getId()); + } + + @Override + public void checkAssembly() { + ds.drain(); + final var dropped = ds.getRemainingTransactions(); + if (dropped != 0) { + log.warn("Dropped txns: {} on: {}", dropped, params().member().getId()); + } + final var viewAssembly = assembly.get(); + if (viewAssembly == null) { + log.warn("Assemble block never processed on: {}", params().member().getId()); + return; + } + viewAssembly.finalElection(); + if (assembled.get()) { + assembled(); + } + } + + @Override + public void checkpoint() { + log.info("Generating checkpoint block on: {}", params().member().getId()); + Block ckpt = view.checkpoint(); + if (ckpt == null) { + log.error("Cannot generate checkpoint block on: {}", params().member().getId()); + transitions.failed(); + return; + } + var next = new HashedBlock(params().digestAlgorithm(), ckpt); + previousBlock.set(next); + checkpoint.set(next); + var validation = view.generateValidation(next); + ds.offer(validation); + final var p = new PendingBlock(next, new HashMap<>(), new AtomicBoolean()); + pending.put(next.hash, p); + p.witnesses.put(params().member(), validation); + log.info("Produced checkpoint: {} height: {} for: {} on: {}", next.hash, next.height(), getViewId(), + params().member().getId()); + transitions.checkpointed(); + } + + @Override + public void complete() { + stop(); + } + + @Override + public void create(List preblock, boolean last) { + Producer.this.create(preblock, last); + } + + @Override + public void fail() { + stop(); + } + + @Override + public void produceAssemble() { + Producer.this.produceAssemble(); + } + + @Override + public void reconfigure() { + log.debug("Starting view reconfiguration for: {} on: {}", nextViewId, params().member().getId()); + assembly.set(new ViewAssembly(nextViewId, view, r -> addReassemble(r), comms) { + @Override + public void complete() { + super.complete(); + log.debug("View reconfiguration: {} gathered: {} complete on: {}", nextViewId, getSlate().size(), + params().member().getId()); + assembled.set(true); + Producer.this.transitions.viewComplete(); + } + }); + assembly.get().start(); + assembly.get().assembled(); + List 
reasses = new ArrayList<>(); + pendingReassembles.drainTo(reasses); + assembly.get().inbound().accept(reasses); + } + + @Override + public void startProduction() { + log.debug("Starting production for: {} on: {}", getViewId(), params().member().getId()); + controller.start(); + coordinator.start(params().producer().gossipDuration()); + } + } } diff --git a/choam/src/main/java/com/salesforce/apollo/choam/Session.java b/choam/src/main/java/com/salesforce/apollo/choam/Session.java index 295c247032..99273d129f 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/Session.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/Session.java @@ -11,8 +11,8 @@ import com.google.protobuf.Message; import com.netflix.concurrency.limits.Limiter; import com.netflix.concurrency.limits.internal.EmptyMetricRegistry; -import com.salesfoce.apollo.choam.proto.SubmitResult; -import com.salesfoce.apollo.choam.proto.Transaction; +import com.salesforce.apollo.choam.proto.SubmitResult; +import com.salesforce.apollo.choam.proto.Transaction; import com.salesforce.apollo.choam.support.HashedCertifiedBlock; import com.salesforce.apollo.choam.support.InvalidTransaction; import com.salesforce.apollo.choam.support.SubmittedTransaction; @@ -21,6 +21,7 @@ import com.salesforce.apollo.cryptography.JohnHancock; import com.salesforce.apollo.cryptography.Signer; import com.salesforce.apollo.cryptography.Verifier; +import com.salesforce.apollo.utils.Utils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -102,24 +103,16 @@ public void cancelAll() { submitted.values().forEach(stx -> stx.onCompletion().cancel(true)); } - /** - * Submit a transaction. - * - * @param transaction - the Message to submit as a transaction - * @param retries - the number of retries for Cancelled transaction submissions - * @param timeout - non-null timeout of the transaction - * @return onCompletion - the future result of the submitted transaction - * @throws InvalidTransaction - if the submitted transaction is invalid in any way - */ - public CompletableFuture submit(Message transaction, int retries, Duration timeout) - throws InvalidTransaction { - return retryNesting(() -> { - try { - return submit(transaction, timeout); - } catch (InvalidTransaction e) { - throw new IllegalStateException("Invalid txn", e); + public void setView(HashedCertifiedBlock v) { + view.set(v); + var currentHeight = v.height(); + for (var it = submitted.entrySet().iterator(); it.hasNext(); ) { + var e = it.next(); + if (e.getValue().view().compareTo(currentHeight) < 0) { + e.getValue().onCompletion().cancel(true); + it.remove(); } - }, retries); + } } /** @@ -177,7 +170,7 @@ public CompletableFuture submit(Message transaction, Duration timeout) th if (params.metrics() != null) { params.metrics().transactionSubmittedSuccess(); } - var futureTimeout = scheduler.schedule(() -> { + var futureTimeout = scheduler.schedule(() -> Thread.ofVirtual().start(Utils.wrapped(() -> { if (result.isDone()) { return; } @@ -187,7 +180,7 @@ public CompletableFuture submit(Message transaction, Duration timeout) th if (params.metrics() != null) { params.metrics().transactionComplete(to); } - }, timeout.toMillis(), TimeUnit.MILLISECONDS); + }, log)), timeout.toMillis(), TimeUnit.MILLISECONDS); return result.whenComplete((r, t) -> { futureTimeout.cancel(true); @@ -266,20 +259,28 @@ public CompletableFuture submit(Message transaction, Duration timeout) th return result; } - public int submitted() { - return submitted.size(); + /** + * Submit a transaction. 
+ * + * @param transaction - the Message to submit as a transaction + * @param retries - the number of retries for Cancelled transaction submissions + * @param timeout - non-null timeout of the transaction + * @return onCompletion - the future result of the submitted transaction + * @throws InvalidTransaction - if the submitted transaction is invalid in any way + */ + public CompletableFuture submit(Message transaction, int retries, Duration timeout) + throws InvalidTransaction { + return retryNesting(() -> { + try { + return submit(transaction, timeout); + } catch (InvalidTransaction e) { + throw new IllegalStateException("Invalid txn", e); + } + }, retries); } - public void setView(HashedCertifiedBlock v) { - view.set(v); - var currentHeight = v.height(); - for (var it = submitted.entrySet().iterator(); it.hasNext(); ) { - var e = it.next(); - if (e.getValue().view().compareTo(currentHeight) < 0) { - e.getValue().onCompletion().cancel(true); - it.remove(); - } - } + public int submitted() { + return submitted.size(); } SubmittedTransaction complete(Digest hash) { diff --git a/choam/src/main/java/com/salesforce/apollo/choam/ViewAssembly.java b/choam/src/main/java/com/salesforce/apollo/choam/ViewAssembly.java index ab36396327..c7cf17ad99 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/ViewAssembly.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/ViewAssembly.java @@ -7,16 +7,17 @@ package com.salesforce.apollo.choam; import com.chiralbehaviors.tron.Fsm; -import com.salesfoce.apollo.choam.proto.*; -import com.salesfoce.apollo.cryptography.proto.PubKey; import com.salesforce.apollo.archipelago.RouterImpl.CommonCommunications; import com.salesforce.apollo.choam.comm.Terminal; import com.salesforce.apollo.choam.fsm.Reconfiguration; import com.salesforce.apollo.choam.fsm.Reconfiguration.Reconfigure; import com.salesforce.apollo.choam.fsm.Reconfiguration.Transitions; +import com.salesforce.apollo.choam.proto.*; import com.salesforce.apollo.cryptography.Digest; +import com.salesforce.apollo.cryptography.proto.PubKey; import com.salesforce.apollo.membership.Member; import com.salesforce.apollo.ring.SliceIterator; +import com.salesforce.apollo.utils.Utils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -150,7 +151,8 @@ private void completeSlice(AtomicReference retryDelay, AtomicReference proposals.keySet().stream().toList(), nextAssembly.size(), delay, params().member().getId()); if (!cancelSlice.get()) { Executors.newScheduledThreadPool(1, Thread.ofVirtual().factory()) - .schedule(() -> reiterate.get().run(), delay.toMillis(), TimeUnit.MILLISECONDS); + .schedule(() -> Thread.ofVirtual().start(Utils.wrapped(reiterate.get(), log)), delay.toMillis(), + TimeUnit.MILLISECONDS); } } diff --git a/choam/src/main/java/com/salesforce/apollo/choam/ViewContext.java b/choam/src/main/java/com/salesforce/apollo/choam/ViewContext.java index 579537f304..51986e6dd3 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/ViewContext.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/ViewContext.java @@ -15,13 +15,13 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.salesfoce.apollo.choam.proto.Assemble; -import com.salesfoce.apollo.choam.proto.Block; -import com.salesfoce.apollo.choam.proto.Certification; -import com.salesfoce.apollo.choam.proto.Executions; -import com.salesfoce.apollo.choam.proto.Join; -import com.salesfoce.apollo.choam.proto.Validate; -import com.salesfoce.apollo.choam.proto.ViewMember; +import 
com.salesforce.apollo.choam.proto.Assemble; +import com.salesforce.apollo.choam.proto.Block; +import com.salesforce.apollo.choam.proto.Certification; +import com.salesforce.apollo.choam.proto.Executions; +import com.salesforce.apollo.choam.proto.Join; +import com.salesforce.apollo.choam.proto.Validate; +import com.salesforce.apollo.choam.proto.ViewMember; import com.salesforce.apollo.choam.CHOAM.BlockProducer; import com.salesforce.apollo.choam.support.HashedBlock; import com.salesforce.apollo.choam.support.HashedCertifiedBlock; @@ -35,34 +35,16 @@ /** * @author hal.hildebrand - * */ public class ViewContext { private final static Logger log = LoggerFactory.getLogger(ViewContext.class); - - public static String print(Certification c, DigestAlgorithm algo) { - return String.format("id: %s sig: %s", Digest.from(c.getId()), algo.digest(c.getSignature().toByteString())); - } - - public static String print(Validate v, DigestAlgorithm algo) { - return String.format("id: %s hash: %s sig: %s", Digest.from(v.getWitness().getId()), Digest.from(v.getHash()), - algo.digest(v.getWitness().getSignature().toByteString())); - } - - public static String print(ViewMember vm, DigestAlgorithm algo) { - return String.format("id: %s key: %s sig: %s", Digest.from(vm.getId()), - algo.digest(publicKey(vm.getConsensusKey()).getEncoded()), - algo.digest(vm.getSignature().toByteString())); - } - private final BlockProducer blockProducer; private final Context context; private final Parameters params; private final Map roster; private final Signer signer; private final Map validators; - public ViewContext(Context context, Parameters params, Signer signer, Map validators, BlockProducer blockProducer) { this.blockProducer = blockProducer; @@ -79,6 +61,21 @@ public ViewContext(Context context, Parameters params, Signer signer, Ma } } + public static String print(Certification c, DigestAlgorithm algo) { + return String.format("id: %s sig: %s", Digest.from(c.getId()), algo.digest(c.getSignature().toByteString())); + } + + public static String print(Validate v, DigestAlgorithm algo) { + return String.format("id: %s hash: %s sig: %s", Digest.from(v.getWitness().getId()), Digest.from(v.getHash()), + algo.digest(v.getWitness().getSignature().toByteString())); + } + + public static String print(ViewMember vm, DigestAlgorithm algo) { + return String.format("id: %s key: %s sig: %s", Digest.from(vm.getId()), + algo.digest(publicKey(vm.getConsensusKey()).getEncoded()), + algo.digest(vm.getSignature().toByteString())); + } + public Block checkpoint() { return blockProducer.checkpoint(); } diff --git a/choam/src/main/java/com/salesforce/apollo/choam/comm/Concierge.java b/choam/src/main/java/com/salesforce/apollo/choam/comm/Concierge.java index 33ce159317..b9d0f2c4fa 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/comm/Concierge.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/comm/Concierge.java @@ -6,18 +6,17 @@ */ package com.salesforce.apollo.choam.comm; -import com.salesfoce.apollo.choam.proto.BlockReplication; -import com.salesfoce.apollo.choam.proto.Blocks; -import com.salesfoce.apollo.choam.proto.CheckpointReplication; -import com.salesfoce.apollo.choam.proto.CheckpointSegments; -import com.salesfoce.apollo.choam.proto.Initial; -import com.salesfoce.apollo.choam.proto.Synchronize; -import com.salesfoce.apollo.choam.proto.ViewMember; +import com.salesforce.apollo.choam.proto.BlockReplication; +import com.salesforce.apollo.choam.proto.Blocks; +import com.salesforce.apollo.choam.proto.CheckpointReplication; 
+import com.salesforce.apollo.choam.proto.CheckpointSegments; +import com.salesforce.apollo.choam.proto.Initial; +import com.salesforce.apollo.choam.proto.Synchronize; +import com.salesforce.apollo.choam.proto.ViewMember; import com.salesforce.apollo.cryptography.Digest; /** * @author hal.hildebrand - * */ public interface Concierge { diff --git a/choam/src/main/java/com/salesforce/apollo/choam/comm/Submitter.java b/choam/src/main/java/com/salesforce/apollo/choam/comm/Submitter.java index ca6f8231a2..564b3bd867 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/comm/Submitter.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/comm/Submitter.java @@ -6,13 +6,12 @@ */ package com.salesforce.apollo.choam.comm; -import com.salesfoce.apollo.choam.proto.SubmitResult; -import com.salesfoce.apollo.choam.proto.Transaction; +import com.salesforce.apollo.choam.proto.SubmitResult; +import com.salesforce.apollo.choam.proto.Transaction; import com.salesforce.apollo.cryptography.Digest; /** * @author hal.hildebrand - * */ public interface Submitter { diff --git a/choam/src/main/java/com/salesforce/apollo/choam/comm/Terminal.java b/choam/src/main/java/com/salesforce/apollo/choam/comm/Terminal.java index 8e301eeceb..e01ac89e24 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/comm/Terminal.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/comm/Terminal.java @@ -6,7 +6,7 @@ */ package com.salesforce.apollo.choam.comm; -import com.salesfoce.apollo.choam.proto.*; +import com.salesforce.apollo.choam.proto.*; import com.salesforce.apollo.archipelago.Link; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.membership.Member; diff --git a/choam/src/main/java/com/salesforce/apollo/choam/comm/TerminalClient.java b/choam/src/main/java/com/salesforce/apollo/choam/comm/TerminalClient.java index 4ab9110b99..fb766960e3 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/comm/TerminalClient.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/comm/TerminalClient.java @@ -6,7 +6,7 @@ */ package com.salesforce.apollo.choam.comm; -import com.salesfoce.apollo.choam.proto.*; +import com.salesforce.apollo.choam.proto.*; import com.salesforce.apollo.archipelago.ManagedServerChannel; import com.salesforce.apollo.archipelago.ServerConnectionCache.CreateClientCommunications; import com.salesforce.apollo.choam.support.ChoamMetrics; @@ -18,10 +18,10 @@ */ public class TerminalClient implements Terminal { - private final ManagedServerChannel channel; + private final ManagedServerChannel channel; private final TerminalGrpc.TerminalBlockingStub client; @SuppressWarnings("unused") - private final ChoamMetrics metrics; + private final ChoamMetrics metrics; public TerminalClient(ManagedServerChannel channel, ChoamMetrics metrics) { this.channel = channel; diff --git a/choam/src/main/java/com/salesforce/apollo/choam/comm/TerminalServer.java b/choam/src/main/java/com/salesforce/apollo/choam/comm/TerminalServer.java index a466e1f027..0ea30e1694 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/comm/TerminalServer.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/comm/TerminalServer.java @@ -6,15 +6,15 @@ */ package com.salesforce.apollo.choam.comm; -import com.salesfoce.apollo.choam.proto.BlockReplication; -import com.salesfoce.apollo.choam.proto.Blocks; -import com.salesfoce.apollo.choam.proto.CheckpointReplication; -import com.salesfoce.apollo.choam.proto.CheckpointSegments; -import com.salesfoce.apollo.choam.proto.Initial; -import 
com.salesfoce.apollo.choam.proto.Synchronize; -import com.salesfoce.apollo.choam.proto.TerminalGrpc.TerminalImplBase; -import com.salesfoce.apollo.choam.proto.ViewMember; -import com.salesfoce.apollo.cryptography.proto.Digeste; +import com.salesforce.apollo.choam.proto.BlockReplication; +import com.salesforce.apollo.choam.proto.Blocks; +import com.salesforce.apollo.choam.proto.CheckpointReplication; +import com.salesforce.apollo.choam.proto.CheckpointSegments; +import com.salesforce.apollo.choam.proto.Initial; +import com.salesforce.apollo.choam.proto.Synchronize; +import com.salesforce.apollo.choam.proto.TerminalGrpc.TerminalImplBase; +import com.salesforce.apollo.choam.proto.ViewMember; +import com.salesforce.apollo.cryptography.proto.Digeste; import com.salesforce.apollo.archipelago.RoutableService; import com.salesforce.apollo.choam.support.ChoamMetrics; import com.salesforce.apollo.cryptography.Digest; @@ -24,13 +24,12 @@ /** * @author hal.hildebrand - * */ public class TerminalServer extends TerminalImplBase { - private ClientIdentity identity; @SuppressWarnings("unused") private final ChoamMetrics metrics; private final RoutableService router; + private ClientIdentity identity; public TerminalServer(ClientIdentity identity, ChoamMetrics metrics, RoutableService router) { this.metrics = metrics; diff --git a/choam/src/main/java/com/salesforce/apollo/choam/comm/TxnSubmission.java b/choam/src/main/java/com/salesforce/apollo/choam/comm/TxnSubmission.java index bdef2c28b9..64e07d9dc9 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/comm/TxnSubmission.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/comm/TxnSubmission.java @@ -8,15 +8,14 @@ import java.io.IOException; -import com.salesfoce.apollo.choam.proto.SubmitResult; -import com.salesfoce.apollo.choam.proto.Transaction; +import com.salesforce.apollo.choam.proto.SubmitResult; +import com.salesforce.apollo.choam.proto.Transaction; import com.salesforce.apollo.archipelago.Link; import com.salesforce.apollo.membership.Member; import com.salesforce.apollo.membership.SigningMember; /** * @author hal.hildebrand - * */ public interface TxnSubmission extends Link { static TxnSubmission getLocalLoopback(SigningMember member, Submitter service) { diff --git a/choam/src/main/java/com/salesforce/apollo/choam/comm/TxnSubmitClient.java b/choam/src/main/java/com/salesforce/apollo/choam/comm/TxnSubmitClient.java index bfeff65b7f..71c2daa33c 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/comm/TxnSubmitClient.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/comm/TxnSubmitClient.java @@ -6,10 +6,10 @@ */ package com.salesforce.apollo.choam.comm; -import com.salesfoce.apollo.choam.proto.SubmitResult; -import com.salesfoce.apollo.choam.proto.Transaction; -import com.salesfoce.apollo.choam.proto.TransactionSubmissionGrpc; -import com.salesfoce.apollo.choam.proto.TransactionSubmissionGrpc.TransactionSubmissionBlockingStub; +import com.salesforce.apollo.choam.proto.SubmitResult; +import com.salesforce.apollo.choam.proto.Transaction; +import com.salesforce.apollo.choam.proto.TransactionSubmissionGrpc; +import com.salesforce.apollo.choam.proto.TransactionSubmissionGrpc.TransactionSubmissionBlockingStub; import com.salesforce.apollo.archipelago.ManagedServerChannel; import com.salesforce.apollo.archipelago.ServerConnectionCache.CreateClientCommunications; import com.salesforce.apollo.choam.support.ChoamMetrics; @@ -17,17 +17,10 @@ /** * @author hal.hildebrand - * */ public class TxnSubmitClient implements 
TxnSubmission { - public static CreateClientCommunications getCreate(ChoamMetrics metrics) { - return (c) -> new TxnSubmitClient(c, metrics); - - } - private final ManagedServerChannel channel; - private final TransactionSubmissionBlockingStub client; public TxnSubmitClient(ManagedServerChannel channel, ChoamMetrics metrics) { @@ -35,6 +28,11 @@ public TxnSubmitClient(ManagedServerChannel channel, ChoamMetrics metrics) { this.client = TransactionSubmissionGrpc.newBlockingStub(channel).withCompression("gzip"); } + public static CreateClientCommunications getCreate(ChoamMetrics metrics) { + return (c) -> new TxnSubmitClient(c, metrics); + + } + @Override public void close() { channel.release(); diff --git a/choam/src/main/java/com/salesforce/apollo/choam/comm/TxnSubmitServer.java b/choam/src/main/java/com/salesforce/apollo/choam/comm/TxnSubmitServer.java index 7b789ea9db..e25c5d5159 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/comm/TxnSubmitServer.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/comm/TxnSubmitServer.java @@ -6,9 +6,9 @@ */ package com.salesforce.apollo.choam.comm; -import com.salesfoce.apollo.choam.proto.SubmitResult; -import com.salesfoce.apollo.choam.proto.Transaction; -import com.salesfoce.apollo.choam.proto.TransactionSubmissionGrpc.TransactionSubmissionImplBase; +import com.salesforce.apollo.choam.proto.SubmitResult; +import com.salesforce.apollo.choam.proto.Transaction; +import com.salesforce.apollo.choam.proto.TransactionSubmissionGrpc.TransactionSubmissionImplBase; import com.salesforce.apollo.archipelago.RoutableService; import com.salesforce.apollo.choam.support.ChoamMetrics; import com.salesforce.apollo.cryptography.Digest; @@ -19,13 +19,12 @@ /** * @author hal.hildebrand - * */ public class TxnSubmitServer extends TransactionSubmissionImplBase { - private ClientIdentity identity; @SuppressWarnings("unused") private final ChoamMetrics metrics; private final RoutableService router; + private ClientIdentity identity; public TxnSubmitServer(ClientIdentity identity, ChoamMetrics metrics, RoutableService router) { this.metrics = metrics; diff --git a/choam/src/main/java/com/salesforce/apollo/choam/support/Bootstrapper.java b/choam/src/main/java/com/salesforce/apollo/choam/support/Bootstrapper.java index bcbc97ac85..67e260ed62 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/support/Bootstrapper.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/support/Bootstrapper.java @@ -8,13 +8,13 @@ import com.google.common.collect.Multiset; import com.google.common.collect.TreeMultiset; -import com.salesfoce.apollo.choam.proto.*; import com.salesforce.apollo.archipelago.RouterImpl.CommonCommunications; import com.salesforce.apollo.bloomFilters.BloomFilter; import com.salesforce.apollo.bloomFilters.BloomFilter.ULongBloomFilter; import com.salesforce.apollo.choam.Parameters; import com.salesforce.apollo.choam.comm.Concierge; import com.salesforce.apollo.choam.comm.Terminal; +import com.salesforce.apollo.choam.proto.*; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.cryptography.HexBloom; @@ -23,6 +23,7 @@ import com.salesforce.apollo.ring.RingIterator; import com.salesforce.apollo.utils.Entropy; import com.salesforce.apollo.utils.Pair; +import com.salesforce.apollo.utils.Utils; import org.joou.ULong; import org.joou.Unsigned; import org.slf4j.Logger; @@ -377,14 +378,14 @@ private void scheduleAnchorCompletion(AtomicReference start, ULong anchor } 
log.info("Scheduling Anchor completion ({} to {}) duration: {} on: {}", start, anchorTo, params.gossipDuration(), params.member().getId()); - scheduler.schedule(() -> { + scheduler.schedule(() -> Thread.ofVirtual().start(Utils.wrapped(() -> { try { anchor(start, anchorTo); } catch (Throwable e) { log.error("Cannot execute completeViewChain on: {}", params.member().getId()); sync.completeExceptionally(e); } - }, params.gossipDuration().toNanos(), TimeUnit.NANOSECONDS); + }, log)), params.gossipDuration().toNanos(), TimeUnit.NANOSECONDS); } private void scheduleSample() { @@ -392,7 +393,7 @@ private void scheduleSample() { return; } log.info("Scheduling state sample on: {}", params.member().getId()); - scheduler.schedule(() -> { + scheduler.schedule(() -> Thread.ofVirtual().start(Utils.wrapped(() -> { final HashedCertifiedBlock established = genesis; if (sync.isDone() || established != null) { log.trace("Synchronization isDone: {} genesis: {} on: {}", sync.isDone(), @@ -406,7 +407,7 @@ private void scheduleSample() { sync.completeExceptionally(e); e.printStackTrace(); } - }, params.gossipDuration().toNanos(), TimeUnit.NANOSECONDS); + }, log)), params.gossipDuration().toNanos(), TimeUnit.NANOSECONDS); } private void scheduleViewChainCompletion(AtomicReference start, ULong to) { @@ -418,14 +419,14 @@ private void scheduleViewChainCompletion(AtomicReference start, ULong to) } log.info("Scheduling view chain completion ({} to {}) duration: {} on: {}", start, to, params.gossipDuration(), params.member().getId()); - scheduler.schedule(() -> { + scheduler.schedule(() -> Thread.ofVirtual().start(Utils.wrapped(() -> { try { completeViewChain(start, to); } catch (Throwable e) { log.error("Cannot execute completeViewChain on: {}", params.member().getId()); sync.completeExceptionally(e); } - }, params.gossipDuration().toNanos(), TimeUnit.NANOSECONDS); + }, log)), params.gossipDuration().toNanos(), TimeUnit.NANOSECONDS); } private boolean synchronize(Optional futureSailor, HashMap votes, diff --git a/choam/src/main/java/com/salesforce/apollo/choam/support/CheckpointAssembler.java b/choam/src/main/java/com/salesforce/apollo/choam/support/CheckpointAssembler.java index ca9f2c1293..ece562b485 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/support/CheckpointAssembler.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/support/CheckpointAssembler.java @@ -6,13 +6,13 @@ */ package com.salesforce.apollo.choam.support; -import com.salesfoce.apollo.choam.proto.Checkpoint; -import com.salesfoce.apollo.choam.proto.CheckpointReplication; -import com.salesfoce.apollo.choam.proto.CheckpointSegments; import com.salesforce.apollo.archipelago.RouterImpl.CommonCommunications; import com.salesforce.apollo.bloomFilters.BloomFilter; import com.salesforce.apollo.choam.comm.Concierge; import com.salesforce.apollo.choam.comm.Terminal; +import com.salesforce.apollo.choam.proto.Checkpoint; +import com.salesforce.apollo.choam.proto.CheckpointReplication; +import com.salesforce.apollo.choam.proto.CheckpointSegments; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.cryptography.HexBloom; @@ -21,6 +21,7 @@ import com.salesforce.apollo.membership.SigningMember; import com.salesforce.apollo.ring.RingIterator; import com.salesforce.apollo.utils.Entropy; +import com.salesforce.apollo.utils.Utils; import org.h2.mvstore.MVMap; import org.joou.ULong; import org.slf4j.Logger; @@ -77,6 +78,12 @@ public CompletableFuture 
assemble(ScheduledExecutorService sche return assembled; } + private void assembled(CheckpointState cs) { + log.info("Assembled checkpoint: {} segments: {} crown: {} on: {}", height, checkpoint.getCount(), diadem, + member.getId()); + assembled.complete(cs); + } + private CheckpointReplication buildRequest() { long seed = Entropy.nextBitsStreamLong(); BloomFilter segmentsBff = new BloomFilter.IntBloomFilter(seed, checkpoint.getCount(), fpr); @@ -101,12 +108,6 @@ private boolean gossip(Optional futureSailor) { return true; } - private void assembled(CheckpointState cs) { - log.info("Assembled checkpoint: {} segments: {} crown: {} on: {}", height, checkpoint.getCount(), diadem, - member.getId()); - assembled.complete(cs); - } - private void gossip(ScheduledExecutorService scheduler, Duration duration) { if (assembled.isDone()) { return; @@ -115,9 +116,9 @@ private void gossip(ScheduledExecutorService scheduler, Duration duration) { member.getId()); var ringer = new RingIterator<>(frequency, context, member, comms, true, scheduler); ringer.iterate(randomCut(digestAlgorithm), (link, ring) -> gossip(link), - (tally, result, destination) -> gossip(result), - t -> scheduler.schedule(() -> gossip(scheduler, duration), duration.toMillis(), - TimeUnit.MILLISECONDS)); + (tally, result, destination) -> gossip(result), t -> scheduler.schedule( + () -> Thread.ofVirtual().start(Utils.wrapped(() -> gossip(scheduler, duration), log)), duration.toMillis(), + TimeUnit.MILLISECONDS)); } diff --git a/choam/src/main/java/com/salesforce/apollo/choam/support/CheckpointState.java b/choam/src/main/java/com/salesforce/apollo/choam/support/CheckpointState.java index 63a3899a77..34b35e43a6 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/support/CheckpointState.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/support/CheckpointState.java @@ -7,8 +7,8 @@ package com.salesforce.apollo.choam.support; import com.google.protobuf.ByteString; -import com.salesfoce.apollo.choam.proto.Checkpoint; -import com.salesfoce.apollo.choam.proto.Slice; +import com.salesforce.apollo.choam.proto.Checkpoint; +import com.salesforce.apollo.choam.proto.Slice; import com.salesforce.apollo.bloomFilters.BloomFilter; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.HexBloom; diff --git a/choam/src/main/java/com/salesforce/apollo/choam/support/DigestType.java b/choam/src/main/java/com/salesforce/apollo/choam/support/DigestType.java index 0d32964229..2dd8f3d551 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/support/DigestType.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/support/DigestType.java @@ -6,16 +6,14 @@ */ package com.salesforce.apollo.choam.support; -import java.nio.ByteBuffer; - +import com.salesforce.apollo.cryptography.Digest; import org.h2.mvstore.WriteBuffer; import org.h2.mvstore.type.BasicDataType; -import com.salesforce.apollo.cryptography.Digest; +import java.nio.ByteBuffer; /** * @author hal.hildebrand - * */ public class DigestType extends BasicDataType { @@ -41,8 +39,7 @@ public Digest read(ByteBuffer buff) { } @Override - public void write(WriteBuffer buff, Digest obj) { - Digest digest = (Digest) obj; + public void write(WriteBuffer buff, Digest digest) { buff.put(digest.getAlgorithm().digestCode()); for (long l : digest.getLongs()) { buff.putLong(l); diff --git a/choam/src/main/java/com/salesforce/apollo/choam/support/HashedBlock.java b/choam/src/main/java/com/salesforce/apollo/choam/support/HashedBlock.java index 
d5cd7109d6..7835dada50 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/support/HashedBlock.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/support/HashedBlock.java @@ -14,31 +14,28 @@ import org.joou.Unsigned; import com.google.protobuf.Message; -import com.salesfoce.apollo.choam.proto.Block; -import com.salesfoce.apollo.choam.proto.CertifiedBlock; -import com.salesfoce.apollo.choam.proto.Header; +import com.salesforce.apollo.choam.proto.Block; +import com.salesforce.apollo.choam.proto.CertifiedBlock; +import com.salesforce.apollo.choam.proto.Header; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; public class HashedBlock implements Comparable { - public static class NullBlock extends HashedBlock { + public final Block block; + public final Digest hash; - public NullBlock(DigestAlgorithm algo) { - super(algo.getOrigin(), null); - } + public HashedBlock(DigestAlgorithm digestAlgorithm, Block block) { + this(digestAlgorithm.digest(block.toByteString()), block); + } - @Override - public int compareTo(HashedBlock o) { - if (this == o) { - return 0; - } - return -1; - } + HashedBlock(Digest hash) { + this.hash = hash; + block = null; + } - @Override - public ULong height() { - return null; - } + HashedBlock(Digest hash, Block block) { + this.hash = hash; + this.block = block; } public static Header buildHeader(DigestAlgorithm digestAlgorithm, Message body, Digest previous, ULong height, @@ -96,23 +93,6 @@ public static ULong height(CertifiedBlock cb) { return height(cb.getBlock()); } - public final Block block; - public final Digest hash; - - public HashedBlock(DigestAlgorithm digestAlgorithm, Block block) { - this(digestAlgorithm.digest(block.toByteString()), block); - } - - HashedBlock(Digest hash) { - this.hash = hash; - block = null; - } - - HashedBlock(Digest hash, Block block) { - this.hash = hash; - this.block = block; - } - @Override public int compareTo(HashedBlock o) { return hash.equals(o.hash) ? 
0 : height().compareTo(o.height()); @@ -130,4 +110,24 @@ public ULong height() { public String toString() { return "hb" + hash.toString() + " height: " + height(); } + + public static class NullBlock extends HashedBlock { + + public NullBlock(DigestAlgorithm algo) { + super(algo.getOrigin(), null); + } + + @Override + public int compareTo(HashedBlock o) { + if (this == o) { + return 0; + } + return -1; + } + + @Override + public ULong height() { + return null; + } + } } diff --git a/choam/src/main/java/com/salesforce/apollo/choam/support/HashedCertifiedBlock.java b/choam/src/main/java/com/salesforce/apollo/choam/support/HashedCertifiedBlock.java index 8b0b68adb9..cb4b4eaa80 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/support/HashedCertifiedBlock.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/support/HashedCertifiedBlock.java @@ -8,35 +8,14 @@ import org.joou.ULong; -import com.salesfoce.apollo.choam.proto.CertifiedBlock; +import com.salesforce.apollo.choam.proto.CertifiedBlock; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; /** * @author hal.hildebrand - * */ public class HashedCertifiedBlock extends HashedBlock { - public static class NullBlock extends HashedCertifiedBlock { - - public NullBlock(DigestAlgorithm algo) { - super(algo.getOrigin()); - } - - @Override - public int compareTo(HashedBlock o) { - if (this == o) { - return 0; - } - return -1; - } - - @Override - public ULong height() { - return null; - } - } - public final CertifiedBlock certifiedBlock; public HashedCertifiedBlock(DigestAlgorithm digestAlgorithm, CertifiedBlock block) { @@ -57,4 +36,24 @@ private HashedCertifiedBlock(Digest hash, CertifiedBlock block) { public String toString() { return "cb" + hash.toString() + " height: " + height(); } + + public static class NullBlock extends HashedCertifiedBlock { + + public NullBlock(DigestAlgorithm algo) { + super(algo.getOrigin()); + } + + @Override + public int compareTo(HashedBlock o) { + if (this == o) { + return 0; + } + return -1; + } + + @Override + public ULong height() { + return null; + } + } } diff --git a/choam/src/main/java/com/salesforce/apollo/choam/support/Store.java b/choam/src/main/java/com/salesforce/apollo/choam/support/Store.java index 1bd281dcfb..fa21e05f18 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/support/Store.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/support/Store.java @@ -7,7 +7,7 @@ package com.salesforce.apollo.choam.support; import com.google.protobuf.InvalidProtocolBufferException; -import com.salesfoce.apollo.choam.proto.*; +import com.salesforce.apollo.choam.proto.*; import com.salesforce.apollo.bloomFilters.BloomFilter; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; diff --git a/choam/src/main/java/com/salesforce/apollo/choam/support/SubmittedTransaction.java b/choam/src/main/java/com/salesforce/apollo/choam/support/SubmittedTransaction.java index a830c5585f..27827bd07e 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/support/SubmittedTransaction.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/support/SubmittedTransaction.java @@ -7,7 +7,7 @@ package com.salesforce.apollo.choam.support; import com.codahale.metrics.Timer; -import com.salesfoce.apollo.choam.proto.Transaction; +import com.salesforce.apollo.choam.proto.Transaction; import com.salesforce.apollo.cryptography.Digest; import org.joou.ULong; diff --git 
a/choam/src/main/java/com/salesforce/apollo/choam/support/TxDataSource.java b/choam/src/main/java/com/salesforce/apollo/choam/support/TxDataSource.java index cd91da18a0..1e89de4b1c 100644 --- a/choam/src/main/java/com/salesforce/apollo/choam/support/TxDataSource.java +++ b/choam/src/main/java/com/salesforce/apollo/choam/support/TxDataSource.java @@ -17,39 +17,35 @@ import org.slf4j.LoggerFactory; import com.google.protobuf.ByteString; -import com.salesfoce.apollo.choam.proto.Reassemble; -import com.salesfoce.apollo.choam.proto.Transaction; -import com.salesfoce.apollo.choam.proto.UnitData; -import com.salesfoce.apollo.choam.proto.Validate; +import com.salesforce.apollo.choam.proto.Reassemble; +import com.salesforce.apollo.choam.proto.Transaction; +import com.salesforce.apollo.choam.proto.UnitData; +import com.salesforce.apollo.choam.proto.Validate; import com.salesforce.apollo.ethereal.DataSource; import com.salesforce.apollo.membership.Member; /** - * - * The data source for CHOAM. Provides back pressure to the caller when the - * capacity of the receiver is exceeded. This data source has a fixed capacity - * and produces a data packet up to the maximum byte size allowed, if the - * receiver has available data. Each time the data is pulled from the receiver, - * the remaining capacity is reduced by the max buffer size. The receiver will - * not accept any more data after the capacity has been used, regardless of - * whether there is space available. - * - * @author hal.hildebrand + * The data source for CHOAM. Provides back pressure to the caller when the capacity of the receiver is exceeded. This + * data source has a fixed capacity and produces a data packet up to the maximum byte size allowed, if the receiver has + * available data. Each time the data is pulled from the receiver, the remaining capacity is reduced by the max buffer + * size. The receiver will not accept any more data after the capacity has been used, regardless of whether there is + * space available. 
* + * @author hal.hildebrand */ public class TxDataSource implements DataSource { private final static Logger log = LoggerFactory.getLogger(TxDataSource.class); - private final Duration batchInterval; - private volatile Thread blockingThread; - private final AtomicBoolean draining = new AtomicBoolean(); - private final ExponentialBackoffPolicy drainPolicy; - private final Member member; - private final ChoamMetrics metrics; - private final BatchingQueue processing; - private final BlockingQueue reassemblies = new LinkedBlockingQueue<>(); - private final BlockingQueue validations = new LinkedBlockingQueue<>(); + private final Duration batchInterval; + private final AtomicBoolean draining = new AtomicBoolean(); + private final ExponentialBackoffPolicy drainPolicy; + private final Member member; + private final ChoamMetrics metrics; + private final BatchingQueue processing; + private final BlockingQueue reassemblies = new LinkedBlockingQueue<>(); + private final BlockingQueue validations = new LinkedBlockingQueue<>(); + private volatile Thread blockingThread; public TxDataSource(Member member, int maxElements, ChoamMetrics metrics, int maxBatchByteSize, Duration batchInterval, int maxBatchCount, ExponentialBackoffPolicy drainPolicy) { @@ -91,8 +87,8 @@ public ByteString getData() { if (draining.get()) { var target = Instant.now().plus(drainPolicy.nextBackoff()); - while (target.isAfter(Instant.now()) && builder.getReassembliesCount() == 0 && - builder.getValidationsCount() == 0) { + while (target.isAfter(Instant.now()) && builder.getReassembliesCount() == 0 + && builder.getValidationsCount() == 0) { // rinse and repeat r = new ArrayList(); reassemblies.drainTo(r); diff --git a/choam/src/test/java/com/salesforce/apollo/choam/GenesisAssemblyTest.java b/choam/src/test/java/com/salesforce/apollo/choam/GenesisAssemblyTest.java index 6036622bf7..95d681d14f 100644 --- a/choam/src/test/java/com/salesforce/apollo/choam/GenesisAssemblyTest.java +++ b/choam/src/test/java/com/salesforce/apollo/choam/GenesisAssemblyTest.java @@ -6,8 +6,8 @@ */ package com.salesforce.apollo.choam; -import com.salesfoce.apollo.choam.proto.*; -import com.salesfoce.apollo.cryptography.proto.PubKey; +import com.salesforce.apollo.choam.proto.*; +import com.salesforce.apollo.cryptography.proto.PubKey; import com.salesforce.apollo.archipelago.LocalServer; import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.archipelago.ServerConnectionCache; diff --git a/choam/src/test/java/com/salesforce/apollo/choam/MembershipTests.java b/choam/src/test/java/com/salesforce/apollo/choam/MembershipTests.java index 04b5d4fd08..87dd3404f7 100644 --- a/choam/src/test/java/com/salesforce/apollo/choam/MembershipTests.java +++ b/choam/src/test/java/com/salesforce/apollo/choam/MembershipTests.java @@ -6,7 +6,7 @@ */ package com.salesforce.apollo.choam; -import com.salesfoce.apollo.choam.proto.Transaction; +import com.salesforce.apollo.choam.proto.Transaction; import com.salesforce.apollo.archipelago.LocalServer; import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.archipelago.ServerConnectionCache; diff --git a/choam/src/test/java/com/salesforce/apollo/choam/SessionTest.java b/choam/src/test/java/com/salesforce/apollo/choam/SessionTest.java index 7ad879be30..7625aa1c23 100644 --- a/choam/src/test/java/com/salesforce/apollo/choam/SessionTest.java +++ b/choam/src/test/java/com/salesforce/apollo/choam/SessionTest.java @@ -13,13 +13,12 @@ import com.google.protobuf.ByteString; import 
com.google.protobuf.InvalidProtocolBufferException; import com.google.protobuf.Message; -import com.salesfoce.apollo.choam.proto.Block; -import com.salesfoce.apollo.choam.proto.CertifiedBlock; -import com.salesfoce.apollo.choam.proto.Header; -import com.salesfoce.apollo.choam.proto.SubmitResult; -import com.salesfoce.apollo.choam.proto.SubmitResult.Result; -import com.salesfoce.apollo.test.proto.ByteMessage; import com.salesforce.apollo.choam.Parameters.RuntimeParameters; +import com.salesforce.apollo.choam.proto.Block; +import com.salesforce.apollo.choam.proto.CertifiedBlock; +import com.salesforce.apollo.choam.proto.Header; +import com.salesforce.apollo.choam.proto.SubmitResult; +import com.salesforce.apollo.choam.proto.SubmitResult.Result; import com.salesforce.apollo.choam.support.HashedCertifiedBlock; import com.salesforce.apollo.choam.support.InvalidTransaction; import com.salesforce.apollo.choam.support.SubmittedTransaction; @@ -31,6 +30,7 @@ import com.salesforce.apollo.stereotomy.StereotomyImpl; import com.salesforce.apollo.stereotomy.mem.MemKERL; import com.salesforce.apollo.stereotomy.mem.MemKeyStore; +import com.salesforce.apollo.test.proto.ByteMessage; import io.grpc.StatusRuntimeException; import org.junit.jupiter.api.Test; import org.slf4j.LoggerFactory; diff --git a/choam/src/test/java/com/salesforce/apollo/choam/TestCHOAM.java b/choam/src/test/java/com/salesforce/apollo/choam/TestCHOAM.java index 768afcb295..021ed26a9a 100644 --- a/choam/src/test/java/com/salesforce/apollo/choam/TestCHOAM.java +++ b/choam/src/test/java/com/salesforce/apollo/choam/TestCHOAM.java @@ -8,7 +8,7 @@ import com.codahale.metrics.ConsoleReporter; import com.codahale.metrics.MetricRegistry; -import com.salesfoce.apollo.choam.proto.Transaction; +import com.salesforce.apollo.choam.proto.Transaction; import com.salesforce.apollo.archipelago.LocalServer; import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.archipelago.ServerConnectionCache; diff --git a/choam/src/test/java/com/salesforce/apollo/choam/TestChain.java b/choam/src/test/java/com/salesforce/apollo/choam/TestChain.java index 43ba1b1c5a..42e4de032e 100644 --- a/choam/src/test/java/com/salesforce/apollo/choam/TestChain.java +++ b/choam/src/test/java/com/salesforce/apollo/choam/TestChain.java @@ -6,7 +6,7 @@ */ package com.salesforce.apollo.choam; -import com.salesfoce.apollo.choam.proto.*; +import com.salesforce.apollo.choam.proto.*; import com.salesforce.apollo.choam.support.HashedCertifiedBlock; import com.salesforce.apollo.choam.support.Store; import com.salesforce.apollo.cryptography.DigestAlgorithm; diff --git a/choam/src/test/java/com/salesforce/apollo/choam/Transactioneer.java b/choam/src/test/java/com/salesforce/apollo/choam/Transactioneer.java index 78b762d1d9..eaa4fdc9c4 100644 --- a/choam/src/test/java/com/salesforce/apollo/choam/Transactioneer.java +++ b/choam/src/test/java/com/salesforce/apollo/choam/Transactioneer.java @@ -7,8 +7,8 @@ package com.salesforce.apollo.choam; import com.google.protobuf.ByteString; -import com.salesfoce.apollo.test.proto.ByteMessage; import com.salesforce.apollo.choam.support.InvalidTransaction; +import com.salesforce.apollo.test.proto.ByteMessage; import com.salesforce.apollo.utils.Utils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/choam/src/test/java/com/salesforce/apollo/choam/ViewAssemblyTest.java b/choam/src/test/java/com/salesforce/apollo/choam/ViewAssemblyTest.java index 0086d9c7b6..82e3d0bff8 100644 --- 
a/choam/src/test/java/com/salesforce/apollo/choam/ViewAssemblyTest.java +++ b/choam/src/test/java/com/salesforce/apollo/choam/ViewAssemblyTest.java @@ -2,9 +2,9 @@ import com.google.protobuf.ByteString; import com.google.protobuf.InvalidProtocolBufferException; -import com.salesfoce.apollo.choam.proto.Reassemble; -import com.salesfoce.apollo.choam.proto.ViewMember; -import com.salesfoce.apollo.cryptography.proto.PubKey; +import com.salesforce.apollo.choam.proto.Reassemble; +import com.salesforce.apollo.choam.proto.ViewMember; +import com.salesforce.apollo.cryptography.proto.PubKey; import com.salesforce.apollo.archipelago.LocalServer; import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.archipelago.ServerConnectionCache; @@ -61,8 +61,8 @@ public class ViewAssemblyTest { private List controllers = new ArrayList<>(); private Map dataSources; private List gossipers = new ArrayList<>(); - private List members; - private Digest nextViewId; + private List members; + private Digest nextViewId; @AfterEach public void after() { diff --git a/choam/src/test/java/com/salesforce/apollo/choam/support/BootstrapperTest.java b/choam/src/test/java/com/salesforce/apollo/choam/support/BootstrapperTest.java index a24e3e0a6a..60bbc8b99a 100644 --- a/choam/src/test/java/com/salesforce/apollo/choam/support/BootstrapperTest.java +++ b/choam/src/test/java/com/salesforce/apollo/choam/support/BootstrapperTest.java @@ -6,9 +6,9 @@ */ package com.salesforce.apollo.choam.support; -import com.salesfoce.apollo.choam.proto.BlockReplication; -import com.salesfoce.apollo.choam.proto.Blocks; -import com.salesfoce.apollo.choam.proto.Initial; +import com.salesforce.apollo.choam.proto.BlockReplication; +import com.salesforce.apollo.choam.proto.Blocks; +import com.salesforce.apollo.choam.proto.Initial; import com.salesforce.apollo.archipelago.RouterImpl.CommonCommunications; import com.salesforce.apollo.bloomFilters.BloomFilter; import com.salesforce.apollo.choam.Parameters; diff --git a/choam/src/test/java/com/salesforce/apollo/choam/support/CheckpointAssemblerTest.java b/choam/src/test/java/com/salesforce/apollo/choam/support/CheckpointAssemblerTest.java index 39e390bbf4..5d7fe0e2f6 100644 --- a/choam/src/test/java/com/salesforce/apollo/choam/support/CheckpointAssemblerTest.java +++ b/choam/src/test/java/com/salesforce/apollo/choam/support/CheckpointAssemblerTest.java @@ -6,10 +6,10 @@ */ package com.salesforce.apollo.choam.support; -import com.salesfoce.apollo.choam.proto.Checkpoint; -import com.salesfoce.apollo.choam.proto.CheckpointReplication; -import com.salesfoce.apollo.choam.proto.CheckpointSegments; -import com.salesfoce.apollo.choam.proto.Slice; +import com.salesforce.apollo.choam.proto.Checkpoint; +import com.salesforce.apollo.choam.proto.CheckpointReplication; +import com.salesforce.apollo.choam.proto.CheckpointSegments; +import com.salesforce.apollo.choam.proto.Slice; import com.salesforce.apollo.archipelago.RouterImpl.CommonCommunications; import com.salesforce.apollo.bloomFilters.BloomFilter; import com.salesforce.apollo.choam.CHOAM; diff --git a/choam/src/test/java/com/salesforce/apollo/choam/support/TxDataSourceTest.java b/choam/src/test/java/com/salesforce/apollo/choam/support/TxDataSourceTest.java index c2e09fc4df..4f89b2f7f7 100644 --- a/choam/src/test/java/com/salesforce/apollo/choam/support/TxDataSourceTest.java +++ b/choam/src/test/java/com/salesforce/apollo/choam/support/TxDataSourceTest.java @@ -16,7 +16,7 @@ import org.junit.jupiter.api.Test; import 
com.google.protobuf.ByteString; -import com.salesfoce.apollo.choam.proto.Transaction; +import com.salesforce.apollo.choam.proto.Transaction; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; import com.salesforce.apollo.stereotomy.StereotomyImpl; @@ -25,7 +25,6 @@ /** * @author hal.hildebrand - * */ public class TxDataSourceTest { @@ -34,9 +33,8 @@ public void func() throws Exception { var entropy = SecureRandom.getInstance("SHA1PRNG"); entropy.setSeed(new byte[] { 6, 6, 6 }); var stereotomy = new StereotomyImpl(new MemKeyStore(), new MemKERL(DigestAlgorithm.DEFAULT), entropy); - TxDataSource ds = new TxDataSource(new ControlledIdentifierMember(stereotomy.newIdentifier()), 100, null, - 1024, Duration.ofMillis(100), 100, - ExponentialBackoffPolicy.newBuilder().build()); + TxDataSource ds = new TxDataSource(new ControlledIdentifierMember(stereotomy.newIdentifier()), 100, null, 1024, + Duration.ofMillis(100), 100, ExponentialBackoffPolicy.newBuilder().build()); Transaction tx = Transaction.newBuilder() .setContent(ByteString.copyFromUtf8("Give me food or give me slack or kill me")) .build(); diff --git a/cryptography/pom.xml b/cryptography/pom.xml index 45e9f190fb..4e9e15ab44 100644 --- a/cryptography/pom.xml +++ b/cryptography/pom.xml @@ -34,10 +34,6 @@ org.apache.commons commons-math3 - - org.slf4j - slf4j-api - com.fasterxml.jackson.core jackson-databind diff --git a/cryptography/src/main/java/com/salesforce/apollo/bloomFilters/BloomFilter.java b/cryptography/src/main/java/com/salesforce/apollo/bloomFilters/BloomFilter.java index 5ae590c25e..6208126651 100644 --- a/cryptography/src/main/java/com/salesforce/apollo/bloomFilters/BloomFilter.java +++ b/cryptography/src/main/java/com/salesforce/apollo/bloomFilters/BloomFilter.java @@ -6,13 +6,13 @@ */ package com.salesforce.apollo.bloomFilters; -import com.salesfoce.apollo.cryptography.proto.Biff; +import com.salesforce.apollo.cryptography.proto.Biff; import com.salesforce.apollo.cryptography.Digest; import org.joou.ULong; import java.util.BitSet; -import static com.salesfoce.apollo.cryptography.proto.Biff.Type.*; +import static com.salesforce.apollo.cryptography.proto.Biff.Type.*; /** * Simplified Bloom filter for multiple types, with setable seeds and other parameters. 
diff --git a/cryptography/src/main/java/com/salesforce/apollo/bloomFilters/BloomWindow.java b/cryptography/src/main/java/com/salesforce/apollo/bloomFilters/BloomWindow.java index 8aa10724f2..05d9e40781 100644 --- a/cryptography/src/main/java/com/salesforce/apollo/bloomFilters/BloomWindow.java +++ b/cryptography/src/main/java/com/salesforce/apollo/bloomFilters/BloomWindow.java @@ -6,7 +6,7 @@ */ package com.salesforce.apollo.bloomFilters; -import com.salesfoce.apollo.cryptography.proto.Biff; +import com.salesforce.apollo.cryptography.proto.Biff; import com.salesforce.apollo.utils.Entropy; import java.util.concurrent.atomic.AtomicInteger; diff --git a/cryptography/src/main/java/com/salesforce/apollo/cryptography/Digest.java b/cryptography/src/main/java/com/salesforce/apollo/cryptography/Digest.java index 3bc5c7a378..f4887aa094 100644 --- a/cryptography/src/main/java/com/salesforce/apollo/cryptography/Digest.java +++ b/cryptography/src/main/java/com/salesforce/apollo/cryptography/Digest.java @@ -6,8 +6,8 @@ */ package com.salesforce.apollo.cryptography; -import com.salesfoce.apollo.cryptography.proto.Digeste; -import com.salesfoce.apollo.cryptography.proto.Digeste.Builder; +import com.salesforce.apollo.cryptography.proto.Digeste; +import com.salesforce.apollo.cryptography.proto.Digeste.Builder; import com.salesforce.apollo.bloomFilters.Hash; import com.salesforce.apollo.utils.BUZ; import com.salesforce.apollo.utils.Hex; diff --git a/cryptography/src/main/java/com/salesforce/apollo/cryptography/DigestAlgorithm.java b/cryptography/src/main/java/com/salesforce/apollo/cryptography/DigestAlgorithm.java index e06fa97d73..25bb3adb77 100644 --- a/cryptography/src/main/java/com/salesforce/apollo/cryptography/DigestAlgorithm.java +++ b/cryptography/src/main/java/com/salesforce/apollo/cryptography/DigestAlgorithm.java @@ -245,6 +245,11 @@ public byte[] hashOf(InputStream is) { return EMPTY; } }, SHA2_256 { + @Override + public String algorithmName() { + return "SHA-256"; + } + @Override public byte digestCode() { return 6; @@ -258,6 +263,11 @@ public int digestLength() { }, SHA2_512 { + @Override + public String algorithmName() { + return "SHA-512"; + } + @Override public byte digestCode() { return 7; @@ -271,6 +281,11 @@ public int digestLength() { }, SHA3_256 { + @Override + public String algorithmName() { + return "SHA3-256"; + } + @Override public byte digestCode() { return 8; @@ -282,6 +297,11 @@ public int digestLength() { } }, SHA3_512 { + @Override + public String algorithmName() { + return "SHA3-512"; + } + @Override public byte digestCode() { return 9; diff --git a/cryptography/src/main/java/com/salesforce/apollo/cryptography/HexBloom.java b/cryptography/src/main/java/com/salesforce/apollo/cryptography/HexBloom.java index 794c5df68e..1149fdeebc 100644 --- a/cryptography/src/main/java/com/salesforce/apollo/cryptography/HexBloom.java +++ b/cryptography/src/main/java/com/salesforce/apollo/cryptography/HexBloom.java @@ -6,7 +6,7 @@ */ package com.salesforce.apollo.cryptography; -import com.salesfoce.apollo.cryptography.proto.HexBloome; +import com.salesforce.apollo.cryptography.proto.HexBloome; import com.salesforce.apollo.bloomFilters.BloomFilter; import com.salesforce.apollo.bloomFilters.Primes; @@ -520,11 +520,14 @@ public Accumulator(int cardinality, int crowns, Digest initial) { this(cardinality, crowns, initial, DEFAULT_FPR); } - /** - * @return the hash digest of the wrapped crowns - */ - public Digest compactWrapped() { - return compactWrapped(hashWraps(accumulators.size())); + public void 
add(Digest digest) { + if (currentCount == cardinality) { + throw new IllegalArgumentException("Current count already equal to cardinality: " + cardinality); + } + currentCount++; + for (int i = 0; i < accumulators.size(); i++) { + accumulators.get(i).accumulateAndGet(hashes.get(i).apply(digest), (a, b) -> a.xor(b)); + } } /** @@ -544,6 +547,17 @@ public Digest compactWrapped(List> hashes) { .reduce(algorithm.getOrigin(), (a, b) -> a.xor(b)); } + /** + * @return the hash digest of the wrapped crowns + */ + public Digest compactWrapped() { + return compactWrapped(hashWraps(accumulators.size())); + } + + public List crowns() { + return accumulators.stream().map(ar -> ar.get()).toList(); + } + public List wrappedCrowns() { return wrappedCrowns(hashWraps(accumulators.size())); } @@ -553,20 +567,6 @@ public List wrappedCrowns(List> wrapingHash) { .mapToObj(i -> wrapingHash.get(i).apply(accumulators.get(i).get())) .toList(); } - - public void add(Digest digest) { - if (currentCount == cardinality) { - throw new IllegalArgumentException("Current count already equal to cardinality: " + cardinality); - } - currentCount++; - for (int i = 0; i < accumulators.size(); i++) { - accumulators.get(i).accumulateAndGet(hashes.get(i).apply(digest), (a, b) -> a.xor(b)); - } - } - - public List crowns() { - return accumulators.stream().map(ar -> ar.get()).toList(); - } } public static class HexAccumulator extends Accumulator { diff --git a/cryptography/src/main/java/com/salesforce/apollo/cryptography/JohnHancock.java b/cryptography/src/main/java/com/salesforce/apollo/cryptography/JohnHancock.java index ad7f09fbf7..809839b4d5 100644 --- a/cryptography/src/main/java/com/salesforce/apollo/cryptography/JohnHancock.java +++ b/cryptography/src/main/java/com/salesforce/apollo/cryptography/JohnHancock.java @@ -7,8 +7,8 @@ package com.salesforce.apollo.cryptography; import com.google.protobuf.ByteString; -import com.salesfoce.apollo.cryptography.proto.Sig; import com.salesforce.apollo.cryptography.Verifier.Filtered; +import com.salesforce.apollo.cryptography.proto.Sig; import com.salesforce.apollo.utils.Hex; import org.joou.ULong; import org.slf4j.LoggerFactory; @@ -100,6 +100,10 @@ public byte[][] getBytes() { return bytes; } + public ULong getSequenceNumber() { + return sequenceNumber; + } + @Override public int hashCode() { final int prime = 31; @@ -165,10 +169,7 @@ public boolean verify(SigningThreshold threshold, Map keys, } int[] arrIndexes = verifiedSignatures.stream().mapToInt(i -> i.intValue()).toArray(); - return SigningThreshold.thresholdMet(threshold, arrIndexes); - } - - public ULong getSequenceNumber() { - return sequenceNumber; + var thresholdMet = SigningThreshold.thresholdMet(threshold, arrIndexes); + return thresholdMet; } } diff --git a/cryptography/src/main/java/com/salesforce/apollo/cryptography/QualifiedBase64.java b/cryptography/src/main/java/com/salesforce/apollo/cryptography/QualifiedBase64.java index cbc76c299e..d18390fe33 100644 --- a/cryptography/src/main/java/com/salesforce/apollo/cryptography/QualifiedBase64.java +++ b/cryptography/src/main/java/com/salesforce/apollo/cryptography/QualifiedBase64.java @@ -1,9 +1,9 @@ package com.salesforce.apollo.cryptography; import com.google.protobuf.ByteString; -import com.salesfoce.apollo.cryptography.proto.Digeste; -import com.salesfoce.apollo.cryptography.proto.PubKey; -import com.salesfoce.apollo.cryptography.proto.Sig; +import com.salesforce.apollo.cryptography.proto.Digeste; +import com.salesforce.apollo.cryptography.proto.PubKey; +import 
com.salesforce.apollo.cryptography.proto.Sig; import java.security.PublicKey; import java.util.Arrays; diff --git a/cryptography/src/main/java/com/salesforce/apollo/cryptography/Verifier.java b/cryptography/src/main/java/com/salesforce/apollo/cryptography/Verifier.java index 16e4c6478a..a76da3bef9 100644 --- a/cryptography/src/main/java/com/salesforce/apollo/cryptography/Verifier.java +++ b/cryptography/src/main/java/com/salesforce/apollo/cryptography/Verifier.java @@ -6,6 +6,9 @@ */ package com.salesforce.apollo.cryptography; +import com.google.protobuf.ByteString; +import com.salesforce.apollo.utils.BbBackedInputStream; + import java.io.InputStream; import java.nio.ByteBuffer; import java.security.PublicKey; @@ -13,93 +16,12 @@ import java.util.List; import java.util.Map; -import com.google.protobuf.ByteString; -import com.salesforce.apollo.utils.BbBackedInputStream; - /** * Verifies a signature using a given key - * - * @author hal.hildebrand * + * @author hal.hildebrand */ public interface Verifier { - class DefaultVerifier implements Verifier { - public static Map mapped(List list) { - var mapped = new HashMap(); - for (int i = 0; i < list.size(); i++) { - mapped.put(i, list.get(i)); - } - return mapped; - } - - public static Map mapped(PublicKey[] array) { - var mapped = new HashMap(); - for (int i = 0; i < array.length; i++) { - mapped.put(i, array[i]); - } - return mapped; - } - - private final Map keys; - - public DefaultVerifier(List keys) { - this(mapped(keys)); - } - - public DefaultVerifier(Map keys) { - this.keys = keys; - } - - public DefaultVerifier(PublicKey key) { - this(mapped(new PublicKey[] { key })); - } - - public DefaultVerifier(PublicKey[] keys) { - this(mapped(keys)); - } - - @Override - public Filtered filtered(SigningThreshold threshold, JohnHancock signature, InputStream message) { - return signature.filter(threshold, keys, message); - } - - @Override - public String toString() { - return "V[" + keys.values().stream().map(k -> ":" + k.getEncoded()).toList() + "]"; - } - - @Override - public boolean verify(JohnHancock signature, InputStream message) { - return verify(SigningThreshold.unweighted(keys.size()), signature, message); - } - - @Override - public boolean verify(SigningThreshold threshold, JohnHancock signature, InputStream message) { - return signature.verify(threshold, keys, message); - } - } - - class MockVerifier implements Verifier { - - @Override - public Filtered filtered(SigningThreshold threshold, JohnHancock signature, InputStream message) { - return new Filtered(true, signature.signatureCount(), signature); - } - - @Override - public boolean verify(JohnHancock signature, InputStream message) { - return true; - } - - @Override - public boolean verify(SigningThreshold threshold, JohnHancock signature, InputStream message) { - return true; - } - - } - - record Filtered(boolean verified, int validating, JohnHancock filtered) {} - default Filtered filtered(SigningThreshold threshold, JohnHancock signature, byte[]... 
message) { return filtered(threshold, signature, BbBackedInputStream.aggregate(message)); } @@ -165,4 +87,82 @@ default boolean verify(SigningThreshold threshold, JohnHancock signature, List keys; + + public DefaultVerifier(List keys) { + this(mapped(keys)); + } + + public DefaultVerifier(Map keys) { + this.keys = keys; + } + + public DefaultVerifier(PublicKey key) { + this(mapped(new PublicKey[] { key })); + } + + public DefaultVerifier(PublicKey[] keys) { + this(mapped(keys)); + } + + public static Map mapped(List list) { + var mapped = new HashMap(); + for (int i = 0; i < list.size(); i++) { + mapped.put(i, list.get(i)); + } + return mapped; + } + + public static Map mapped(PublicKey[] array) { + var mapped = new HashMap(); + for (int i = 0; i < array.length; i++) { + mapped.put(i, array[i]); + } + return mapped; + } + + @Override + public Filtered filtered(SigningThreshold threshold, JohnHancock signature, InputStream message) { + return signature.filter(threshold, keys, message); + } + + @Override + public String toString() { + return "V[" + keys.values().stream().map(k -> ":" + k.getEncoded()).toList() + "]"; + } + + @Override + public boolean verify(JohnHancock signature, InputStream message) { + return verify(SigningThreshold.unweighted(keys.size()), signature, message); + } + + @Override + public boolean verify(SigningThreshold threshold, JohnHancock signature, InputStream message) { + return signature.verify(threshold, keys, message); + } + } + + public class MockVerifier implements Verifier { + + @Override + public Filtered filtered(SigningThreshold threshold, JohnHancock signature, InputStream message) { + return new Filtered(true, signature.signatureCount(), signature); + } + + @Override + public boolean verify(JohnHancock signature, InputStream message) { + return true; + } + + @Override + public boolean verify(SigningThreshold threshold, JohnHancock signature, InputStream message) { + return true; + } + + } + + record Filtered(boolean verified, int validating, JohnHancock filtered) { + } } diff --git a/cryptography/src/test/java/com/salesforce/apollo/bloomFilters/BloomWindowTest.java b/cryptography/src/test/java/com/salesforce/apollo/bloomFilters/BloomWindowTest.java index cae4bf65a1..bcf7fd97b8 100644 --- a/cryptography/src/test/java/com/salesforce/apollo/bloomFilters/BloomWindowTest.java +++ b/cryptography/src/test/java/com/salesforce/apollo/bloomFilters/BloomWindowTest.java @@ -1,6 +1,6 @@ package com.salesforce.apollo.bloomFilters; -import com.salesfoce.apollo.cryptography.proto.Biff; +import com.salesforce.apollo.cryptography.proto.Biff; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import org.junit.jupiter.api.Test; diff --git a/delphinius/README.md b/delphinius/README.md index 2bc3b823d2..6b47a06fe6 100644 --- a/delphinius/README.md +++ b/delphinius/README.md @@ -1,44 +1,87 @@ # Delphinius + Access Control ____ -Apollo Delphinius is a Relationship Based Access Control and is a [Google Zanzibar](https://research.google/pubs/pub48190/) clone implemented in SQL and run on Apollo's SQL state machine. - +Apollo Delphinius is a Relationship Based Access Control and is +a [Google Zanzibar](https://research.google/pubs/pub48190/) "like" clone implemented in SQL and run on Apollo's SQL +state machine. ## Model -Delphinius presents a simple model for Access Control Lists. An ACL is an asserted Tuple of {Object, Relation, Subject}. Each of the tuple elements forms a seperate domain class. 
Each domain is further qualified with Namespaces. For example an Object is a tuple of {Namespace, Name}. Subject is likewise a tuple of {Namespace, Name}. -Each domain class is further arranged in a directed graphs of mappings. These mappings provide the mechanism to form containment sets. For example, we can define the Subject of {"foo", "Users"}. We can map another Subject {"foo", "Jale"} to {"foo", "Users"}. This could indicate that Subject {"foo", "Jale"} is a "member" of the group {"foo", "Users"}. While in this example, all the Subjects share the same namespace "foo", this is not required, and namespaces can be interlinked as required. +Delphinius presents a simple model for Access Control Lists. An ACL is an asserted Tuple of {Subject, Object}. +Each of the tuple elements forms a separate domain class and is further qualified with a Namespace. For example +an Object is a tuple of {Namespace, Name, Relation}. Subject is likewise a tuple of {Namespace, Name, Relation}. +Relation is simply {Namespace, Name}. + +Each domain class is further arranged in a directed graph of mappings through parent/child relationships. These +mappings +provide the mechanism to form DAG containment sets. For example, we can define the Subject of {"foo", "Users"}. We can +map another Subject {"foo", "Jale"} to {"foo", "Users"}. This could indicate that Subject {"foo", "Jale"} is a "member" +of the group {"foo", "Users"}. While in this example, all the Subjects share the same namespace "foo", this is not +required, and namespaces can be interlinked as required. + +There are special, predefined NULL instances of the various domains + +* NULL Namespace +* NULL Relation +* NULL Subject +* NULL Object +* NULL Assertion + +Both Subjects and Objects can have the NULL Relation, and all domains may be part of the NULL Namespace. -Assertions are a tuple of {Subject, Object}. This asserts that the Subject has a positive link to the Object. Remember, both the Subject and Object are {namespace, name, relation} tuples, so this express a great deal of flexibility. Recall as well that Subject and Object have an internal inference structure, expanding the assertion set based on these transitive relationships. The Check(Assertion) uses the expanded direct and transitive sets when evaluating. +Assertions are a tuple of {Subject, Object}. This asserts that the Subject has a positive link to the Object. Remember, +both the Subject and Object are {namespace, name, relation} tuples, so this expresses a great deal of flexibility. Recall +as well that Subject and Object have an internal inference structure, expanding the assertion set based on these +transitive relationships. The Check(Assertion) uses the expanded direct and transitive sets when evaluating. -Note that all domains - Object, Relation and Subject - are both Namespaced and hierarchically related. While this does allow for bewildering complexity, it is a natural and powerful model that allows concise modeling of rich access control patterns. +Note that all domains - Object, Relation and Subject - are both Namespaced and hierarchically related. While this does +allow for bewildering complexity, it appears to be a natural and powerful model that allows concise modeling of rich +access control patterns. -For a good explanation and visualization of the Zanzibar model, see the [Zanzibar Academy](https://zanzibar.academy). This site has a wonderful visual tutorial on the basics, the model and why it's a powerful model. Links to papers, open source implementations, etc.
Well worth your time ;) +For a good explanation and visualization of the Zanzibar model, see the [Zanzibar Academy](https://zanzibar.academy). +This site has a wonderful visual tutorial on the basics, the model and why it's a powerful model. Links to papers, open +source implementations, etc. Well worth your time ;) ## API -The Oracle interface provides the following API: - * read(Object...) - return subjects with direct access to the objects - * read(Relation, Object...) - return subjects with direct access to the objects, filtered by relation predicate - * read(Subject...) - return objects with direct access from the subjects - * read(Relation, Subject...) - return objects with direct access from the subjects, filtered by relation predicate - * expand(Object...) - return subjects with transitive and direct access to the objects - * expand(Relation, Object...) - return subjects with transitive and direct access to the objects - * add(T) where T in {Object, Relation, Subject, Assertion) - Add the entity - * delete(T) where T in {Object, Relation, Subject, Assertion) - Delete the entity - * map(A, B) where A,B in {Object, Relation, Subject} - Map and create all inferred mappings from entity A to entity B - * remove(A, B) where A,B in {Object, Relation, Subject} - Remove mapping and all inferred mappings from entity A to entity B - * check(Assertion) - Check if the Assertion exists + +The __Oracle__ interface provides the following API: + +* read(Object...) - return subjects with direct access to the objects +* read(Relation, Object...) - return subjects with direct access to the objects, filtered by relation predicate +* read(Subject...) - return objects with direct access from the subjects +* read(Relation, Subject...) - return objects with direct access from the subjects, filtered by relation predicate +* expand(Object...) - return subjects with transitive and direct access to the objects +* expand(Relation, Object...) - return subjects with transitive and direct access to the objects +* add(T) where T in {Object, Relation, Subject, Assertion} - Add the entity +* delete(T) where T in {Object, Relation, Subject, Assertion} - Delete the entity +* map(A, B) where A,B in {Object, Relation, Subject} - Map and create all inferred mappings from entity A to entity B +* remove(A, B) where A,B in {Object, Relation, Subject} - Remove mapping and all inferred mappings from entity A to + entity B +* check(Assertion) - Check if the Assertion exists Currently, mappings are transitive as the system does not currently support relation rewrite sets. ## Design -Delphinius is implemented as a set of SQL tables and is loosely based on the wonderful work of [Kernal Erdogan](https://www.codeproject.com/Articles/30380/A-Fairly-Capable-Authorization-Sub-System-with-Row). The technique is, of course, as old as time and to get a good feel, see [Maintaining Transitive Closure of Graphs in SQL](https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.910.3322&rep=rep1&type=pdf). Internally, the full closure sets of all the Domain DAGs are stored in one table - Edge. This strategy trades space for speed, as it is expected that the vast majority of operations performed will be Assertion checks. As such, Delphinius has a practical upper bound, as the DAG closure table blowout is potentially huge.
The SQL to implement Delphinius is generic and should work on any other system, but translating the triggers and stored procedures used would have to be accomplished, so it's not a generic component, rather specialized for Apollo's use of the H2DB. -The system is designed to be used to implement row level security in the larger Apollo ecosystem, as well as smoothly integrating with Stereotomy identity and key management. +Delphinius is implemented as a set of SQL tables and is loosely based on the wonderful work +of [Kernal Erdogan](https://www.codeproject.com/Articles/30380/A-Fairly-Capable-Authorization-Sub-System-with-Row). The +technique is, of course, as old as time and to get a good feel, +see [Maintaining Transitive Closure of Graphs in SQL](https://homepages.inf.ed.ac.uk/libkin/papers/tc-sql.pdf). +Internally, the full closure sets of all the Domain DAGs are stored in one table - Edge. This strategy trades space for +speed, as it is expected that the vast majority of operations performed will be Assertion checks. As such, Delphinius +has a practical upper bound, as the DAG closure table blowout is potentially huge. The SQL to implement Delphinius is +generic and should work on any other system, but translating the triggers and stored procedures used would have to be +accomplished, so it's not a generic component, rather specialized for Apollo's use of the H2DB. + +The system is designed to be used to implement row level security in the larger Apollo ecosystem, as well as smoothly +integrating with Stereotomy identity and key management. ### Zookies? -Note that the time based features of Google Zanibar are blatantly missing. The intention is to use a function of block height, which is available in the SQL state machine. -My understanding (still unproven, of course!) is that this will function as intended for a Zookie. ### Recursive Queries For Set Operations? -Soon, I hope... \ No newline at end of file + +Note that the time based features of Google Zanzibar are blatantly missing. The intention is to use a function of block +height, which is available in the SQL state machine. My assertion is that this will function as intended for a Zookie. ### Recursive Queries For Set Operations? + +Who knows?
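To make the API list in the README above concrete, here is a minimal, illustrative Java sketch of the Oracle calls it describes. It leans only on constructors and call shapes visible elsewhere in this patch (the static helpers in AbstractOracle, the deleted DelphiResource, Questions3Test); the Assertion constructor taking (Subject, Object), the argument order of map(), the timeout values, and the names DelphiniusSketch / Doc123 are assumptions made for the example, not verified signatures.

```java
import java.util.concurrent.TimeUnit;

import com.salesforce.apollo.delphinius.Oracle;
import com.salesforce.apollo.delphinius.Oracle.Assertion;
import com.salesforce.apollo.delphinius.Oracle.Namespace;
import com.salesforce.apollo.delphinius.Oracle.Object;
import com.salesforce.apollo.delphinius.Oracle.Relation;
import com.salesforce.apollo.delphinius.Oracle.Subject;

public class DelphiniusSketch {
    // Hypothetical walk-through of the Oracle API; an already wired-up Oracle is assumed.
    void example(Oracle oracle) throws Exception {
        var foo = new Namespace("foo");
        var member = new Relation(foo, "member");
        var view = new Relation(foo, "view");

        // {"foo", "Jale"} is a "member" of the group {"foo", "Users"}; map() creates the
        // inferred (transitive) mappings. Mutations return futures, so wait for them.
        var users = new Subject(foo, "Users", member);
        var jale = new Subject(foo, "Jale", member);
        oracle.map(jale, users).get(10, TimeUnit.SECONDS);

        // Grant the group view access to a document, then check the inferred access.
        var doc123 = new Object(foo, "Doc123", view);
        oracle.add(new Assertion(users, doc123)).get(10, TimeUnit.SECONDS); // assumed {Subject, Object} shape
        boolean allowed = oracle.check(new Assertion(jale, doc123));        // true via the transitive closure
        var viewers = oracle.expand(doc123);                                // direct and transitive Subjects
    }
}
```

The split in this sketch between asynchronous mutations and synchronous queries mirrors the (now deleted) DelphiResource: add/map/delete complete through the SQL state machine and so return futures, while check/read/expand are plain SQL queries against the Edge closure table described in the Design section, which is why Assertion checks are expected to dominate.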
diff --git a/delphinius/src/main/java/com/salesforce/apollo/delphinius/AbstractOracle.java b/delphinius/src/main/java/com/salesforce/apollo/delphinius/AbstractOracle.java index c52b1739a6..33d02465e8 100644 --- a/delphinius/src/main/java/com/salesforce/apollo/delphinius/AbstractOracle.java +++ b/delphinius/src/main/java/com/salesforce/apollo/delphinius/AbstractOracle.java @@ -6,12 +6,13 @@ */ package com.salesforce.apollo.delphinius; -import static com.salesforce.apollo.delphinius.schema.tables.Assertion.ASSERTION; -import static com.salesforce.apollo.delphinius.schema.tables.Edge.EDGE; -import static com.salesforce.apollo.delphinius.schema.tables.Namespace.NAMESPACE; -import static com.salesforce.apollo.delphinius.schema.tables.Object.OBJECT; -import static com.salesforce.apollo.delphinius.schema.tables.Relation.RELATION; -import static com.salesforce.apollo.delphinius.schema.tables.Subject.SUBJECT; +import com.salesforce.apollo.delphinius.schema.tables.Edge; +import org.jooq.Record; +import org.jooq.*; +import org.jooq.exception.DataAccessException; +import org.jooq.impl.DSL; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.sql.Connection; import java.sql.SQLException; @@ -19,27 +20,17 @@ import java.util.List; import java.util.stream.Stream; -import org.jooq.DSLContext; -import org.jooq.Field; -import org.jooq.Name; -import org.jooq.Record; -import org.jooq.Record1; -import org.jooq.Record2; -import org.jooq.SQLDialect; -import org.jooq.SelectJoinStep; -import org.jooq.Table; -import org.jooq.exception.DataAccessException; -import org.jooq.impl.DSL; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.salesforce.apollo.delphinius.schema.tables.Edge; +import static com.salesforce.apollo.delphinius.schema.tables.Assertion.ASSERTION; +import static com.salesforce.apollo.delphinius.schema.tables.Edge.EDGE; +import static com.salesforce.apollo.delphinius.schema.tables.Namespace.NAMESPACE; +import static com.salesforce.apollo.delphinius.schema.tables.Object.OBJECT; +import static com.salesforce.apollo.delphinius.schema.tables.Relation.RELATION; +import static com.salesforce.apollo.delphinius.schema.tables.Subject.SUBJECT; /** * An Access Control Oracle - * - * @author hal.hildebrand * + * @author hal.hildebrand */ abstract public class AbstractOracle implements Oracle { @@ -50,25 +41,34 @@ abstract public class AbstractOracle implements Oracle { protected static final Field cParent = DSL.field(DSL.name("CANDIDATES", "PARENT"), Long.class); protected static final Edge E = EDGE.as("E"); protected static final Logger log = LoggerFactory.getLogger(AbstractOracle.class); - protected static final Name ROWZ = DSL.name("ROZ"); + protected static final Name ROWZ = DSL.name("ROWZ"); protected static final Table rowzTable = DSL.table(ROWZ); protected static final Table s1 = rowzTable.as("S1"); + protected static final Table s2 = rowzTable.as("S2"); + protected static final Table s3 = rowzTable.as("S3"); protected static final Field s1Child = DSL.field(DSL.name("S1", "CHILD"), Long.class); protected static final Field s1Parent = DSL.field(DSL.name("S1", "PARENT"), Long.class); - protected static final Table s2 = rowzTable.as("S2"); protected static final Field s2Child = DSL.field(DSL.name("S2", "CHILD"), Long.class); protected static final Field s2Parent = DSL.field(DSL.name("S2", "PARENT"), Long.class); - protected static final Table s3 = rowzTable.as("S3"); protected static final Field s3Child = DSL.field(DSL.name("S3", "CHILD"), Long.class); protected static 
final Field s3Parent = DSL.field(DSL.name("S3", "PARENT"), Long.class); protected static final Field sChild = DSL.field(DSL.name("SUSPECT", "CHILD"), Long.class); protected static final Field sParent = DSL.field(DSL.name("SUSPECT", "PARENT"), Long.class); protected static final Name suspect = DSL.name("SUSPECT"); + private final DSLContext dslCtx; + + public AbstractOracle(Connection connection) { + this(DSL.using(connection, SQLDialect.H2)); + } + + public AbstractOracle(DSLContext dslCtx) { + this.dslCtx = dslCtx; + } public static void addAssertion(Connection connection, String subjectNamespace, String subjectName, String subjectRelationNamespace, String subjectRelationName, String objectNamespace, - String objectName, String objectRelationNamespace, - String objectRelationName) throws SQLException { + String objectName, String objectRelationNamespace, String objectRelationName) + throws SQLException { var subject = new Subject(new Namespace(subjectNamespace), subjectName, new Relation(new Namespace(subjectRelationNamespace), subjectRelationName)); var object = new Object(new Namespace(objectNamespace), objectName, @@ -92,8 +92,8 @@ public static void addObject(Connection connection, String objectNamespace, Stri add(DSL.using(connection, SQLDialect.H2), object); } - public static void addRelation(Connection connection, String relationNamespace, - String relationName) throws SQLException { + public static void addRelation(Connection connection, String relationNamespace, String relationName) + throws SQLException { var relation = new Relation(new Namespace(relationNamespace), relationName); add(DSL.using(connection, SQLDialect.H2), relation); @@ -134,8 +134,8 @@ public static void deleteObject(Connection connection, String objectNamespace, S delete(DSL.using(connection, SQLDialect.H2), object); } - public static void deleteRelation(Connection connection, String relationNamespace, - String relationName) throws SQLException { + public static void deleteRelation(Connection connection, String relationNamespace, String relationName) + throws SQLException { var relation = new Relation(new Namespace(relationNamespace), relationName); delete(DSL.using(connection, SQLDialect.H2), relation); @@ -151,8 +151,8 @@ public static void deleteSubject(Connection connection, String subjectNamespace, public static void mapObject(Connection connection, String parentNamespace, String parentName, String parentRelationNamespace, String parentRelationName, String childNamespace, - String childName, String childRelationNamespace, - String childRelationName) throws SQLException { + String childName, String childRelationNamespace, String childRelationName) + throws SQLException { var parent = new Object(new Namespace(parentNamespace), parentName, new Relation(new Namespace(parentRelationNamespace), parentRelationName)); var child = new Object(new Namespace(childNamespace), childName, @@ -171,8 +171,8 @@ public static void mapRelation(Connection connection, String parentNamespace, St public static void mapSubject(Connection connection, String parentNamespace, String parentName, String parentRelationNamespace, String parentRelationName, String childNamespace, - String childName, String childRelationNamespace, - String childRelationName) throws SQLException { + String childName, String childRelationNamespace, String childRelationName) + throws SQLException { var parent = new Subject(new Namespace(parentNamespace), parentName, new Relation(new Namespace(parentRelationNamespace), parentRelationName)); var child = new 
Subject(new Namespace(childNamespace), childName, @@ -183,8 +183,8 @@ public static void mapSubject(Connection connection, String parentNamespace, Str public static void removeObject(Connection connection, String parentNamespace, String parentName, String parentRelationNamespace, String parentRelationName, String childNamespace, - String childName, String childRelationNamespace, - String childRelationName) throws SQLException { + String childName, String childRelationNamespace, String childRelationName) + throws SQLException { var parent = new Object(new Namespace(parentNamespace), parentName, new Relation(new Namespace(parentRelationNamespace), parentRelationName)); var child = new Object(new Namespace(childNamespace), childName, @@ -203,8 +203,8 @@ public static void removeRelation(Connection connection, String parentNamespace, public static void removeSubject(Connection connection, String parentNamespace, String parentName, String parentRelationNamespace, String parentRelationName, String childNamespace, - String childName, String childRelationNamespace, - String childRelationName) throws SQLException { + String childName, String childRelationNamespace, String childRelationName) + throws SQLException { var parent = new Subject(new Namespace(parentNamespace), parentName, new Relation(new Namespace(parentRelationNamespace), parentRelationName)); var child = new Subject(new Namespace(childNamespace), childName, @@ -397,10 +397,10 @@ static void deleteEdge(DSLContext c, Long parent, String type, Long child) throw .from(EDGE) .where(EDGE.CHILD.eq(parent)) - .union(context.select(DSL.val(parent), - EDGE.CHILD.as(EDGE.CHILD)) - .from(EDGE) - .where(EDGE.PARENT.eq(child))) + .union( + context.select(DSL.val(parent), EDGE.CHILD.as(EDGE.CHILD)) + .from(EDGE) + .where(EDGE.PARENT.eq(child))) .union(context.select(A.PARENT, B.CHILD) .from(A) @@ -409,8 +409,7 @@ static void deleteEdge(DSLContext c, Long parent, String type, Long child) throw .and(B.PARENT.eq(child))) .asTable(suspect)) .on(sParent.eq(EDGE.PARENT)) - .and(sChild.eq(EDGE.CHILD))) - .and(EDGE.TRANSITIVE.isTrue())) + .and(sChild.eq(EDGE.CHILD))).and(EDGE.TRANSITIVE.isTrue())) .execute(); context.with(ROWZ) @@ -448,22 +447,22 @@ static void deleteEdge(DSLContext c, Long parent, String type, Long child) throw } static SelectJoinStep> grants(Long s, DSLContext ctx, Long o) throws SQLException { - Table> subject = ctx.select(EDGE.CHILD.as("SUBJECT_ID")) - .from(EDGE) - .where(EDGE.TYPE.eq(SUBJECT_TYPE)) - .and(EDGE.PARENT.eq(s)) - .union(ctx.select(DSL.val(s).as("SUBJECT_ID"))) - .asTable(); - Field subjectId = subject.field("SUBJECT_ID", Long.class); - - Table> object = ctx.select(EDGE.CHILD.as("OBJECT_ID")) - .from(EDGE) - .where(EDGE.TYPE.eq(OBJECT_TYPE)) - .and(EDGE.PARENT.eq(o)) - .union(DSL.select(DSL.val(o).as("OBJECT_ID"))) - .asTable(); - - Field objectId = object.field("OBJECT_ID", Long.class); + var subject = ctx.select(EDGE.CHILD.as("SUBJECT_ID")) + .from(EDGE) + .where(EDGE.TYPE.eq(SUBJECT_TYPE)) + .and(EDGE.PARENT.eq(s)) + .union(ctx.select(DSL.val(s).as("SUBJECT_ID"))) + .asTable(); + var subjectId = subject.field("SUBJECT_ID", Long.class); + + var object = ctx.select(EDGE.CHILD.as("OBJECT_ID")) + .from(EDGE) + .where(EDGE.TYPE.eq(OBJECT_TYPE)) + .and(EDGE.PARENT.eq(o)) + .union(DSL.select(DSL.val(o).as("OBJECT_ID"))) + .asTable(); + + var objectId = object.field("OBJECT_ID", Long.class); return ctx.select(subjectId, objectId) .from(subject.crossJoin(object) @@ -520,10 +519,10 @@ static void remove(Subject parent, DSLContext 
context, Subject child) throws SQL } static Long resolve(DSLContext context, Namespace namespace) throws SQLException { - Record1 resolved = context.select(NAMESPACE.ID) - .from(NAMESPACE) - .where(NAMESPACE.NAME.eq(namespace.name())) - .fetchOne(); + var resolved = context.select(NAMESPACE.ID) + .from(NAMESPACE) + .where(NAMESPACE.NAME.eq(namespace.name())) + .fetchOne(); if (resolved == null) { return null; } @@ -661,19 +660,9 @@ static NamespacedId resolveAdd(DSLContext context, Subject subject) throws SQLEx return new NamespacedId(namespace, resolved.value1(), relation.id()); } - private final DSLContext dslCtx; - - public AbstractOracle(Connection connection) { - this(DSL.using(connection, SQLDialect.H2)); - } - - public AbstractOracle(DSLContext dslCtx) { - this.dslCtx = dslCtx; - } - /** * Check the assertion. - * + * * @return true if the assertion is made, false if not */ public boolean check(Assertion assertion) throws SQLException { @@ -686,10 +675,9 @@ public boolean check(Assertion assertion) throws SQLException { } /** - * Answer the list of direct and transitive Subjects that map to the supplied - * object. The query only considers subjects with assertions that match the - * object completely - i.e. {namespace, name, relation} - * + * Answer the list of direct and transitive Subjects that map to the supplied object. The query only considers + * subjects with assertions that match the object completely - i.e. {namespace, name, relation} + * * @throws SQLException */ public List expand(Object object) throws SQLException { @@ -697,11 +685,10 @@ public List expand(Object object) throws SQLException { } /** - * Answer the list of direct and transitive Subjects that map to the object from - * subjects that have the supplied predicate as their relation. The query only - * considers assertions that match the object completely - i.e. {namespace, - * name, relation} - * + * Answer the list of direct and transitive Subjects that map to the object from subjects that have the supplied + * predicate as their relation. The query only considers assertions that match the object completely - i.e. + * {namespace, name, relation} + * * @throws SQLException */ public List expand(Relation predicate, Object object) throws SQLException { @@ -709,11 +696,10 @@ public List expand(Relation predicate, Object object) throws SQLExcepti } /** - * Answer the list of direct and transitive Objects that map to the subject from - * objects that have the supplied predicate as their relation. The query only - * considers assertions that match the subject completely - i.e. {namespace, - * name, relation} - * + * Answer the list of direct and transitive Objects that map to the subject from objects that have the supplied + * predicate as their relation. The query only considers assertions that match the subject completely - i.e. + * {namespace, name, relation} + * * @throws SQLException */ @Override @@ -722,10 +708,9 @@ public List expand(Relation predicate, Subject subject) throws SQLExcept } /** - * Answer the list of direct and transitive Objects that map to the supplied - * subject. The query only considers objects with assertions that match the - * subject completely - i.e. {namespace, name, relation} - * + * Answer the list of direct and transitive Objects that map to the supplied subject. The query only considers + * objects with assertions that match the subject completely - i.e. 
{namespace, name, relation} + * * @throws SQLException */ @Override @@ -734,10 +719,9 @@ public List expand(Subject subject) throws SQLException { } /** - * Answer the list of direct Subjects that map to the supplied objects. The - * query only considers subjects with assertions that match the objects - * completely - i.e. {namespace, name, relation} - * + * Answer the list of direct Subjects that map to the supplied objects. The query only considers subjects with + * assertions that match the objects completely - i.e. {namespace, name, relation} + * * @throws SQLException */ public List read(Object... objects) throws SQLException { @@ -752,11 +736,10 @@ public List read(Object... objects) throws SQLException { } /** - * Answer the list of direct Subjects that map to the supplied objects. The - * query only considers subjects with assertions that match the objects - * completely - i.e. {namespace, name, relation} and only the subjects that have + * Answer the list of direct Subjects that map to the supplied objects. The query only considers subjects with + * assertions that match the objects completely - i.e. {namespace, name, relation} and only the subjects that have * the matching predicate - * + * * @throws SQLException */ public List read(Relation predicate, Object... objects) throws SQLException { @@ -771,11 +754,10 @@ public List read(Relation predicate, Object... objects) throws SQLExcep } /** - * Answer the list of direct Objects that map to the supplied subjects. The - * query only considers objects with assertions that match the subjects - * completely - i.e. {namespace, name, relation} and only the objects that have + * Answer the list of direct Objects that map to the supplied subjects. The query only considers objects with + * assertions that match the subjects completely - i.e. {namespace, name, relation} and only the objects that have * the matching predicate - * + * * @throws SQLException */ @Override @@ -791,10 +773,9 @@ public List read(Relation predicate, Subject... subjects) throws SQLExce } /** - * Answer the list of direct Objects that map to the supplied subjects. The - * query only considers objects with assertions that match the subjects - * completely - i.e. {namespace, name, relation} - * + * Answer the list of direct Objects that map to the supplied subjects. The query only considers objects with + * assertions that match the subjects completely - i.e. {namespace, name, relation} + * * @throws SQLException */ @Override @@ -810,11 +791,10 @@ public List read(Subject... subjects) throws SQLException { } /** - * Answer the list of direct and transitive subjects that map to the object. - * These subjects may be further filtered by the predicate Relation, if not - * null. The query only considers assertions that match the object completely - + * Answer the list of direct and transitive subjects that map to the object. These subjects may be further filtered + * by the predicate Relation, if not null. The query only considers assertions that match the object completely - * i.e. {namespace, name, relation} - * + * * @throws SQLException */ @Override @@ -942,11 +922,10 @@ private Stream directSubjects(Relation predicate, Object object) throws } /** - * Answer the list of direct and transitive objects that map to the subject. - * These object may further filtered by the predicate Relation, if not null. The - * query only considers assertions that match the subject completely - i.e. + * Answer the list of direct and transitive objects that map to the subject. 
These object may further filtered by + * the predicate Relation, if not null. The query only considers assertions that match the subject completely - i.e. * {namespace, name, relation} - * + * * @throws SQLException */ private Stream objects(Relation predicate, Subject subject) throws SQLException { diff --git a/delphinius/src/test/java/com/salesforce/apollo/delphinius/Questions3Test.java b/delphinius/src/test/java/com/salesforce/apollo/delphinius/Questions3Test.java index 4ad15dcc0a..8d5d20f16b 100644 --- a/delphinius/src/test/java/com/salesforce/apollo/delphinius/Questions3Test.java +++ b/delphinius/src/test/java/com/salesforce/apollo/delphinius/Questions3Test.java @@ -6,26 +6,21 @@ */ package com.salesforce.apollo.delphinius; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; +import com.salesforce.apollo.delphinius.Oracle.Assertion; +import liquibase.Liquibase; +import liquibase.database.core.H2Database; +import liquibase.resource.ClassLoaderResourceAccessor; +import org.h2.jdbc.JdbcConnection; +import org.junit.jupiter.api.Test; import java.util.Arrays; import java.util.Properties; import java.util.Random; -import org.h2.jdbc.JdbcConnection; -import org.junit.jupiter.api.Test; - -import com.salesforce.apollo.delphinius.Oracle.Assertion; - -import liquibase.Liquibase; -import liquibase.database.core.H2Database; -import liquibase.resource.ClassLoaderResourceAccessor; +import static org.junit.jupiter.api.Assertions.*; /** * @author hal.hildebrand - * */ public class Questions3Test { @@ -73,7 +68,7 @@ private void smoke(Oracle oracle) throws Exception { var member = ns.relation("member"); var flag = ns.relation("flag"); - // Group membersip + // Group membership var userMembers = ns.subject("Users", member); var adminMembers = ns.subject("Admins", member); var helpDeskMembers = ns.subject("HelpDesk", member); @@ -165,9 +160,9 @@ private void smoke(Oracle oracle) throws Exception { } // Transitive grants to view the document -// var inferredViewable = oracle.expand(egin); -// assertEquals(1, inferredViewable.size()); -// assertTrue(inferredViewable.contains(object123View), "Should contain: " + object123View); + // var inferredViewable = oracle.expand(egin); + // assertEquals(1, inferredViewable.size()); + // assertTrue(inferredViewable.contains(object123View), "Should contain: " + object123View); // Transitive subjects filtered by flag predicate var inferredFlaggedViewers = oracle.expand(flag, object123View); diff --git a/demo/README.md b/demo/README.md deleted file mode 100644 index d1260396b1..0000000000 --- a/demo/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Apollo Demo - -Web based demo for Apollo Delphinius \ No newline at end of file diff --git a/demo/pom.xml b/demo/pom.xml deleted file mode 100644 index 0866fe9560..0000000000 --- a/demo/pom.xml +++ /dev/null @@ -1,49 +0,0 @@ - - 4.0.0 - - com.salesforce.apollo - apollo.app - 0.0.1-SNAPSHOT - - demo - Demo - ReBAC + Identity + Key Management - - - - com.salesforce.apollo - model - - - io.dropwizard - dropwizard-core - - - - - org.hamcrest - hamcrest - test - - - org.junit.jupiter - junit-jupiter-params - test - - - org.junit.jupiter - junit-jupiter-engine - test - - - org.mockito - mockito-core - test - - - io.dropwizard - dropwizard-testing - test - - - \ No newline at end of file diff --git a/demo/src/main/java/com/salesforce/apollo/demo/DelphiResource.java 
b/demo/src/main/java/com/salesforce/apollo/demo/DelphiResource.java deleted file mode 100644 index be2ed1306f..0000000000 --- a/demo/src/main/java/com/salesforce/apollo/demo/DelphiResource.java +++ /dev/null @@ -1,404 +0,0 @@ -/* - * Copyright (c) 2021, salesforce.com, inc. - * All rights reserved. - * SPDX-License-Identifier: BSD-3-Clause - * For full license text, see the LICENSE file in the repo root or https:/opensource.org/licenses/BSD-3-Clause - */ -package com.salesforce.apollo.demo; - -import java.sql.SQLException; -import java.time.Duration; -import java.util.List; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; -import java.util.stream.Stream; - -import com.codahale.metrics.annotation.Timed; -import com.salesforce.apollo.delphinius.Oracle; -import com.salesforce.apollo.delphinius.Oracle.Assertion; -import com.salesforce.apollo.delphinius.Oracle.Namespace; -import com.salesforce.apollo.delphinius.Oracle.Object; -import com.salesforce.apollo.delphinius.Oracle.Relation; -import com.salesforce.apollo.delphinius.Oracle.Subject; - -import jakarta.ws.rs.Consumes; -import jakarta.ws.rs.DELETE; -import jakarta.ws.rs.POST; -import jakarta.ws.rs.PUT; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.Produces; -import jakarta.ws.rs.WebApplicationException; -import jakarta.ws.rs.core.MediaType; -import jakarta.ws.rs.core.Response; - -/** - * @author hal.hildebrand - * - */ -@Path("/delphi") -@Consumes(MediaType.APPLICATION_JSON) -@Produces(MediaType.APPLICATION_JSON) -public class DelphiResource { - - public record PredicateObject(Relation predicate, Object object) {} - - public record Assocation (T a, T b) {} - - public record PredicateObjects(Relation predicate, List objects) {} - - public record PredicateSubject(Relation predicate, Subject subject) {} - - private final Oracle oracle; - private final Duration timeout; - - public DelphiResource(Oracle oracle, Duration timeout) { - this.oracle = oracle; - this.timeout = timeout; - } - - @PUT - @Timed - @Path("admin/add/assertion") - public void add(Assertion assertion) { - try { - oracle.add(assertion).get(timeout.toMillis(), TimeUnit.MILLISECONDS); - } catch (InterruptedException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } catch (ExecutionException e) { - throw new WebApplicationException(e.getCause(), Response.Status.BAD_REQUEST); - } catch (TimeoutException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } - } - - @PUT - @Timed - @Path("admin/add/namespace") - public void add(Namespace namespace) { - try { - oracle.add(namespace).get(timeout.toMillis(), TimeUnit.MILLISECONDS); - } catch (InterruptedException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } catch (ExecutionException e) { - throw new WebApplicationException(e.getCause(), Response.Status.BAD_REQUEST); - } catch (TimeoutException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } - } - - @PUT - @Timed - @Path("admin/add/object") - public void add(Object object) { - try { - oracle.add(object).get(timeout.toMillis(), TimeUnit.MILLISECONDS); - } catch (InterruptedException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } catch (ExecutionException e) { - throw new WebApplicationException(e.getCause(), Response.Status.BAD_REQUEST); - } catch (TimeoutException e) { - throw new WebApplicationException(e, 
Response.Status.REQUEST_TIMEOUT); - } - } - - @PUT - @Timed - @Path("admin/add/relation") - public void add(Relation relation) { - try { - oracle.add(relation).get(timeout.toMillis(), TimeUnit.MILLISECONDS); - } catch (InterruptedException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } catch (ExecutionException e) { - throw new WebApplicationException(e.getCause(), Response.Status.BAD_REQUEST); - } catch (TimeoutException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } - } - - @PUT - @Timed - @Path("admin/add/subject") - public void add(Subject subject) { - try { - oracle.add(subject).get(timeout.toMillis(), TimeUnit.MILLISECONDS); - } catch (InterruptedException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } catch (ExecutionException e) { - throw new WebApplicationException(e.getCause(), Response.Status.BAD_REQUEST); - } catch (TimeoutException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } - } - - @POST - @Timed - @Path("check") - public boolean check(Assertion assertion) { - try { - return oracle.check(assertion); - } catch (SQLException e) { - throw new WebApplicationException(e.getCause(), Response.Status.INTERNAL_SERVER_ERROR); - } - } - - @DELETE - @Timed - @Path("admin/delete/assertion") - public void delete(Assertion assertion) { - try { - oracle.delete(assertion).get(timeout.toMillis(), TimeUnit.MILLISECONDS); - } catch (InterruptedException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } catch (ExecutionException e) { - throw new WebApplicationException(e.getCause(), Response.Status.BAD_REQUEST); - } catch (TimeoutException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } - } - - @DELETE - @Timed - @Path("admin/delete/namespace") - public void delete(Namespace namespace) { - try { - oracle.delete(namespace).get(timeout.toMillis(), TimeUnit.MILLISECONDS); - } catch (InterruptedException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } catch (ExecutionException e) { - throw new WebApplicationException(e.getCause(), Response.Status.BAD_REQUEST); - } catch (TimeoutException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } - } - - @DELETE - @Timed - @Path("admin/delete/object") - public void delete(Object object) { - try { - oracle.delete(object).get(timeout.toMillis(), TimeUnit.MILLISECONDS); - } catch (InterruptedException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } catch (ExecutionException e) { - throw new WebApplicationException(e.getCause(), Response.Status.BAD_REQUEST); - } catch (TimeoutException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } - } - - @DELETE - @Timed - @Path("admin/delete/relation") - public void delete(Relation relation) { - try { - oracle.delete(relation).get(timeout.toMillis(), TimeUnit.MILLISECONDS); - } catch (InterruptedException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } catch (ExecutionException e) { - throw new WebApplicationException(e.getCause(), Response.Status.BAD_REQUEST); - } catch (TimeoutException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } - } - - @DELETE - @Timed - @Path("admin/delete/subject") - public void delete(Subject subject) { - try { - oracle.delete(subject).get(timeout.toMillis(), TimeUnit.MILLISECONDS); - } catch 
(InterruptedException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } catch (ExecutionException e) { - throw new WebApplicationException(e.getCause(), Response.Status.BAD_REQUEST); - } catch (TimeoutException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } - } - - @POST - @Timed - @Path("admin/expand/object") - public List expand(Object object) { - try { - return oracle.expand(object); - } catch (SQLException e) { - throw new WebApplicationException(e.getCause(), Response.Status.INTERNAL_SERVER_ERROR); - } - } - - @POST - @Timed - @Path("admin/expand/objects") - public List expand(PredicateObject predicateObject) { - try { - return oracle.expand(predicateObject.predicate, predicateObject.object); - } catch (SQLException e) { - throw new WebApplicationException(e.getCause(), Response.Status.INTERNAL_SERVER_ERROR); - } - } - - @POST - @Timed - @Path("admin/expand/subjects") - public List expand(PredicateSubject predicateSubject) { - try { - return oracle.expand(predicateSubject.predicate, predicateSubject.subject); - } catch (SQLException e) { - throw new WebApplicationException(e.getCause(), Response.Status.INTERNAL_SERVER_ERROR); - } - } - - @POST - @Timed - @Path("admin/expand/subject") - public List expand(Subject subject) { - try { - return oracle.expand(subject); - } catch (SQLException e) { - throw new WebApplicationException(e.getCause(), Response.Status.INTERNAL_SERVER_ERROR); - } - } - - @PUT - @Timed - @Path("admin/map/object") - public void mapObject(Assocation association) { - try { - oracle.map(association.a, association.b).get(); - } catch (InterruptedException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } catch (ExecutionException e) { - throw new WebApplicationException(e.getCause(), Response.Status.BAD_REQUEST); - } - } - - @PUT - @Timed - @Path("admin/map/relation") - public void mapRelation(Assocation association) { - try { - oracle.map(association.a, association.b).get(timeout.toMillis(), TimeUnit.MILLISECONDS); - } catch (InterruptedException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } catch (ExecutionException e) { - throw new WebApplicationException(e.getCause(), Response.Status.BAD_REQUEST); - } catch (TimeoutException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } - } - - @PUT - @Timed - @Path("admin/map/subject") - public void mapSubject(Assocation association) { - try { - oracle.map(association.a, association.b).get(timeout.toMillis(), TimeUnit.MILLISECONDS); - } catch (InterruptedException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } catch (ExecutionException e) { - throw new WebApplicationException(e.getCause(), Response.Status.BAD_REQUEST); - } catch (TimeoutException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } - } - - @POST - @Timed - @Path("admin/read/objects/subjects") - public List read(PredicateObjects predicateObjects) { - try { - return oracle.read(predicateObjects.predicate, - predicateObjects.objects.toArray(new Object[predicateObjects.objects.size()])); - } catch (SQLException e) { - throw new WebApplicationException(e.getCause(), Response.Status.INTERNAL_SERVER_ERROR); - } - } - - @POST - @Timed - @Path("admin/read/subjects/objects") - public Response read(PredicateSubject predicateSubject) { - return null; - } - - @POST - @Timed - @Path("admin/read/subjects") - public List readObjects(List objects) { - 
try { - return oracle.read(objects.toArray(new Object[objects.size()])); - } catch (SQLException e) { - throw new WebApplicationException(e.getCause(), Response.Status.INTERNAL_SERVER_ERROR); - } - } - - @POST - @Timed - @Path("admin/read/objects") - public List readSubjects(List subjects) { - try { - return oracle.read(subjects.toArray(new Subject[subjects.size()])); - } catch (SQLException e) { - throw new WebApplicationException(e.getCause(), Response.Status.INTERNAL_SERVER_ERROR); - } - } - - @DELETE - @Timed - @Path("admin/remove/object") - public void removeObjectMapping(Assocation association) { - try { - oracle.remove(association.a, association.b).get(timeout.toMillis(), TimeUnit.MILLISECONDS); - } catch (InterruptedException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } catch (ExecutionException e) { - throw new WebApplicationException(e.getCause(), Response.Status.BAD_REQUEST); - } catch (TimeoutException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } - } - - @DELETE - @Timed - @Path("admin/remove/relation") - public void removeRelationMapping(Assocation association) { - try { - oracle.remove(association.a, association.b).get(timeout.toMillis(), TimeUnit.MILLISECONDS); - } catch (InterruptedException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } catch (ExecutionException e) { - throw new WebApplicationException(e.getCause(), Response.Status.BAD_REQUEST); - } catch (TimeoutException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } - } - - @DELETE - @Timed - @Path("admin/remove/subject") - public void removeSubjectMapping(Assocation association) { - try { - oracle.remove(association.a, association.b).get(timeout.toMillis(), TimeUnit.MILLISECONDS); - } catch (InterruptedException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } catch (ExecutionException e) { - throw new WebApplicationException(e.getCause(), Response.Status.BAD_REQUEST); - } catch (TimeoutException e) { - throw new WebApplicationException(e, Response.Status.REQUEST_TIMEOUT); - } - } - - @POST - @Timed - @Path("admin/subjects") - public Stream subjects(PredicateObject predicateObject) { - try { - return oracle.subjects(predicateObject.predicate, predicateObject.object); - } catch (SQLException e) { - throw new WebApplicationException(e.getCause(), Response.Status.INTERNAL_SERVER_ERROR); - } - } -} diff --git a/demo/src/main/java/com/salesforce/apollo/demo/DemoApplication.java b/demo/src/main/java/com/salesforce/apollo/demo/DemoApplication.java deleted file mode 100644 index 7257c877c5..0000000000 --- a/demo/src/main/java/com/salesforce/apollo/demo/DemoApplication.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2021, salesforce.com, inc. - * All rights reserved. 
- * SPDX-License-Identifier: BSD-3-Clause - * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause - */ -package com.salesforce.apollo.demo; - -import java.time.Duration; - -import com.salesforce.apollo.model.Domain; - -import io.dropwizard.core.Application; -import io.dropwizard.core.setup.Environment; - -/** - * @author hal.hildebrand - * - */ -public class DemoApplication extends Application { - - @SuppressWarnings("unused") - private Domain node; - - @Override - public void run(DemoConfiguration configuration, Environment environment) throws Exception { - environment.jersey().register(new DelphiResource(null, Duration.ofSeconds(2))); - environment.healthChecks().register("demo", new DemoHealthCheck()); - } -} diff --git a/demo/src/main/java/com/salesforce/apollo/demo/DemoConfiguration.java b/demo/src/main/java/com/salesforce/apollo/demo/DemoConfiguration.java deleted file mode 100644 index f2a4a603b5..0000000000 --- a/demo/src/main/java/com/salesforce/apollo/demo/DemoConfiguration.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2021, salesforce.com, inc. - * All rights reserved. - * SPDX-License-Identifier: BSD-3-Clause - * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause - */ -package com.salesforce.apollo.demo; - -import com.salesforce.apollo.choam.Parameters; - -import io.dropwizard.core.Configuration; - -/** - * @author hal.hildebrand - * - */ -public class DemoConfiguration extends Configuration { - private Parameters.Builder params = Parameters.newBuilder(); - - protected Parameters.Builder getParams() { - return params; - } - - protected void setParams(Parameters.Builder params) { - this.params = params; - } -} diff --git a/demo/src/main/java/com/salesforce/apollo/demo/DemoHealthCheck.java b/demo/src/main/java/com/salesforce/apollo/demo/DemoHealthCheck.java deleted file mode 100644 index 6dfb05a90e..0000000000 --- a/demo/src/main/java/com/salesforce/apollo/demo/DemoHealthCheck.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2021, salesforce.com, inc. - * All rights reserved. - * SPDX-License-Identifier: BSD-3-Clause - * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause - */ -package com.salesforce.apollo.demo; - -import com.codahale.metrics.health.HealthCheck; - -/** - * @author hal.hildebrand - * - */ -public class DemoHealthCheck extends HealthCheck { - - @Override - protected Result check() throws Exception { - return Result.healthy(); - } - -} diff --git a/demo/src/main/java/com/salesforce/apollo/demo/ProtobufMimeProvider.java b/demo/src/main/java/com/salesforce/apollo/demo/ProtobufMimeProvider.java deleted file mode 100644 index f8e816f76a..0000000000 --- a/demo/src/main/java/com/salesforce/apollo/demo/ProtobufMimeProvider.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright (c) 2021, salesforce.com, inc. - * All rights reserved. 
- * SPDX-License-Identifier: BSD-3-Clause - * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause - */ -package com.salesforce.apollo.demo; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.lang.annotation.Annotation; -import java.lang.reflect.Method; -import java.lang.reflect.Type; - -import com.google.protobuf.GeneratedMessage; -import com.google.protobuf.Message; - -import jakarta.ws.rs.Consumes; -import jakarta.ws.rs.Produces; -import jakarta.ws.rs.WebApplicationException; -import jakarta.ws.rs.core.MediaType; -import jakarta.ws.rs.core.MultivaluedMap; -import jakarta.ws.rs.ext.MessageBodyReader; -import jakarta.ws.rs.ext.MessageBodyWriter; -import jakarta.ws.rs.ext.Provider; - -/** - * @author hal.hildebrand - * - */ -@Provider -@Consumes("application/x-protobuf") -@Produces("application/x-protobuf") - -public class ProtobufMimeProvider implements MessageBodyWriter, MessageBodyReader { - // MessageBodyWriter Implementation - @Override - public long getSize(Message message, Class arg1, Type arg2, Annotation[] arg3, MediaType arg4) { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - try { - message.writeTo(baos); - } catch (IOException e) { - return -1; - } - return baos.size(); - } - - // MessageBodyReader Implementation - @Override - public boolean isReadable(Class arg0, Type arg1, Annotation[] arg2, MediaType arg3) { - return Message.class.isAssignableFrom(arg0); - } - - @Override - public boolean isWriteable(Class arg0, Type arg1, Annotation[] arg2, MediaType arg3) { - return Message.class.isAssignableFrom(arg0); - } - - @Override - public Message readFrom(Class arg0, Type arg1, Annotation[] arg2, MediaType arg3, - MultivaluedMap arg4, - InputStream istream) throws IOException, WebApplicationException { - try { - Method builderMethod = arg0.getMethod("newBuilder"); - GeneratedMessage.Builder builder = (GeneratedMessage.Builder) builderMethod.invoke(arg0); - return builder.mergeFrom(istream).build(); - } catch (Exception e) { - throw new WebApplicationException(e); - } - } - - @Override - public void writeTo(Message message, Class arg1, Type arg2, Annotation[] arg3, MediaType arg4, - MultivaluedMap arg5, - OutputStream ostream) throws IOException, WebApplicationException { - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - message.writeTo(baos); - ostream.write(baos.toByteArray()); - } -} diff --git a/demo/src/test/java/com/salesforce/apollo/demo/DemoTest.java b/demo/src/test/java/com/salesforce/apollo/demo/DemoTest.java deleted file mode 100644 index 71d29c974b..0000000000 --- a/demo/src/test/java/com/salesforce/apollo/demo/DemoTest.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2021, salesforce.com, inc. - * All rights reserved. 
- * SPDX-License-Identifier: BSD-3-Clause - * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause - */ -package com.salesforce.apollo.demo; - -import static org.junit.Assert.assertNotNull; - -import org.junit.ClassRule; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; - -import io.dropwizard.testing.ResourceHelpers; -import io.dropwizard.testing.junit5.DropwizardAppExtension; -import io.dropwizard.testing.junit5.DropwizardExtensionsSupport; -import jakarta.ws.rs.client.Client; -import jakarta.ws.rs.client.Entity; -import jakarta.ws.rs.core.Response; - -/** - * @author hal.hildebrand - * - */ -@ExtendWith(DropwizardExtensionsSupport.class) -public class DemoTest { - @ClassRule - private static DropwizardAppExtension EXT = new DropwizardAppExtension<>(DemoApplication.class, - ResourceHelpers.resourceFilePath("demo-test.yaml")); - - @Test - void smokin() { - Client client = EXT.client(); - - Response response = client.target(String.format("http://localhost:%d/login", EXT.getLocalPort())) - .request() - .post(Entity.json(testQuery())); - assertNotNull(response); - - } - - private Object testQuery() { - // TODO Auto-generated method stub - return null; - } - -} diff --git a/demo/src/test/resources/demo-test.yaml b/demo/src/test/resources/demo-test.yaml deleted file mode 100644 index 21335002ad..0000000000 --- a/demo/src/test/resources/demo-test.yaml +++ /dev/null @@ -1,10 +0,0 @@ -logging: - level: INFO - appenders: - - type: console - threshold: ALL - queueSize: 512 - discardingThreshold: 0 - timeZone: UTC - target: stdout - logFormat: "%-5p [%d{ISO8601,UTC}] %c: %m%n%rEx" diff --git a/ethereal/src/main/java/com/salesforce/apollo/ethereal/Adder.java b/ethereal/src/main/java/com/salesforce/apollo/ethereal/Adder.java index 571fbee373..d07c0a86bb 100644 --- a/ethereal/src/main/java/com/salesforce/apollo/ethereal/Adder.java +++ b/ethereal/src/main/java/com/salesforce/apollo/ethereal/Adder.java @@ -23,14 +23,14 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.salesfoce.apollo.ethereal.proto.Commit; -import com.salesfoce.apollo.ethereal.proto.Have; -import com.salesfoce.apollo.ethereal.proto.Missing; -import com.salesfoce.apollo.ethereal.proto.PreUnit_s; -import com.salesfoce.apollo.ethereal.proto.PreVote; -import com.salesfoce.apollo.ethereal.proto.SignedCommit; -import com.salesfoce.apollo.ethereal.proto.SignedPreVote; -import com.salesfoce.apollo.cryptography.proto.Biff; +import com.salesforce.apollo.ethereal.proto.Commit; +import com.salesforce.apollo.ethereal.proto.Have; +import com.salesforce.apollo.ethereal.proto.Missing; +import com.salesforce.apollo.ethereal.proto.PreUnit_s; +import com.salesforce.apollo.ethereal.proto.PreVote; +import com.salesforce.apollo.ethereal.proto.SignedCommit; +import com.salesforce.apollo.ethereal.proto.SignedPreVote; +import com.salesforce.apollo.cryptography.proto.Biff; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.cryptography.JohnHancock; @@ -41,29 +41,38 @@ /** * Implements the chain Reliable Broadcast of Aleph. - * - * The public methods of the Adder correspond to the gossip replication protocol - * actions. - * - * @author hal.hildebrand * + * The public methods of the Adder correspond to the gossip replication protocol actions. 
+ * + * @author hal.hildebrand */ public class Adder { - /** - * PROPOSED -> WAITING_ON_ROUND -> PREVOTED -> WAITING_FOR_PARENTS -> COMMITTED - * -> OUTPUT - * - * FAILED can occur at each state transition - * - */ - public enum State { - COMMITTED, FAILED, OUTPUT, PREVOTED, PROPOSED, WAITING_FOR_PARENTS, WAITING_ON_ROUND; - } - - public record Signed (Digest hash, T signed) {} - private static final Logger log = LoggerFactory.getLogger(Adder.class); + private final Map> commits = new TreeMap<>(); + private final Config conf; + private final Dag dag; + private final int epoch; + private final Set failed; + private final ReentrantLock lock = new ReentrantLock(true); + private final int maxSize; + private final Map> missing = new TreeMap<>(); + private final Map> prevotes = new TreeMap<>(); + private final Map signedCommits = new TreeMap<>(); + private final Map signedPrevotes = new TreeMap<>(); + private final int threshold; + private final Map waiting = new TreeMap<>(); + private final Map waitingById = new TreeMap<>(); + private final Map waitingForRound = new TreeMap<>(); + private volatile int round = 0; + public Adder(int epoch, Dag dag, int maxSize, Config conf, Set failed) { + this.epoch = epoch; + this.dag = dag; + this.conf = conf; + this.failed = failed; + this.threshold = Dag.threshold(conf.nProc()); + this.maxSize = maxSize; + } public static Signed commit(final Long id, final Digest hash, final short pid, Signer signer, DigestAlgorithm algo) { @@ -83,32 +92,6 @@ public static Signed prevote(final Long id, final Digest hash, fi SignedPreVote.newBuilder().setVote(prevote).setSignature(signature.toSig()).build()); } - private final Map> commits = new TreeMap<>(); - private final Config conf; - private final Dag dag; - private final int epoch; - private final Set failed; - private final ReentrantLock lock = new ReentrantLock(true); - private final int maxSize; - private final Map> missing = new TreeMap<>(); - private final Map> prevotes = new TreeMap<>(); - private volatile int round = 0; - private final Map signedCommits = new TreeMap<>(); - private final Map signedPrevotes = new TreeMap<>(); - private final int threshold; - private final Map waiting = new TreeMap<>(); - private final Map waitingById = new TreeMap<>(); - private final Map waitingForRound = new TreeMap<>(); - - public Adder(int epoch, Dag dag, int maxSize, Config conf, Set failed) { - this.epoch = epoch; - this.dag = dag; - this.conf = conf; - this.failed = failed; - this.threshold = Dag.threshold(conf.nProc()); - this.maxSize = maxSize; - } - public void close() { log.trace("Closing adder epoch: {} on: {}", dag.epoch(), conf.logLabel()); locked(() -> { @@ -181,7 +164,7 @@ public String dump() { /** * Answer the Have state of the receiver - commits, prevotes, and proposed units - * + * * @return the Have state of the receiver */ public Have have() { @@ -224,9 +207,8 @@ public void produce(Unit u) { /** * Provide the missing state from the receiver state from the supplied update. 
- * + * * @param haves - the have state of the partner - * * @return Missing based on the current state and the haves of the receiver */ public Missing updateFor(Have haves) { @@ -299,7 +281,7 @@ public void updateFrom(Missing update) { /** * A commit was received - * + * * @param digest - the digest of the unit * @param member - the index of the member */ @@ -386,7 +368,7 @@ Map getWaitingForRound() { /** * A preVote was received - * + * * @param digest - the digest of the unit * @param member - the index of the member */ @@ -441,7 +423,7 @@ void prevote(Digest digest, short member) { /** * A unit has been proposed. - * + * * @param digest - the digest identifying the unit * @param u - the serialized preUnit */ @@ -521,8 +503,7 @@ private void advance() { } /** - * checkIfMissing sets the children() attribute of a newly created - * waitingPreunit, depending on if it was missing + * checkIfMissing sets the children() attribute of a newly created waitingPreunit, depending on if it was missing */ private void checkIfMissing(Waiting wp) { log.trace("Checking if missing: {} on: {}", wp, conf.logLabel()); @@ -541,9 +522,9 @@ private void checkIfMissing(Waiting wp) { } /** - * finds out which parents of a newly created WaitingPreUnit are in the dag, - * which are waiting, and which are missing. Sets values of waitingParents() and - * missingParents accordingly. Additionally, returns maximal heights of dag. + * finds out which parents of a newly created WaitingPreUnit are in the dag, which are waiting, and which are + * missing. Sets values of waitingParents() and missingParents accordingly. Additionally, returns maximal heights of + * dag. */ private int[] checkParents(Waiting wp) { var epoch = wp.epoch(); @@ -690,8 +671,8 @@ private void locked(Runnable r) { } /** - * Update the gossip builder with the missing units filtered by the supplied - * bloom filter indicating units already known + * Update the gossip builder with the missing units filtered by the supplied bloom filter indicating units already + * known */ private void missing(BloomFilter have, Missing.Builder builder) { var pus = new TreeMap(); @@ -744,8 +725,7 @@ private void prevote(Waiting wpu) { } /** - * registerMissing registers the fact that the given WaitingPreUnit needs an - * unknown unit with the given id. + * registerMissing registers the fact that the given WaitingPreUnit needs an unknown unit with the given id. 
*/ private void registerMissing(long id, Waiting wp) { missing.computeIfAbsent(id, i -> new ArrayList<>()).add(wp); @@ -820,4 +800,16 @@ private boolean validateParents(Waiting wp) { } return result; } + + /** + * PROPOSED -> WAITING_ON_ROUND -> PREVOTED -> WAITING_FOR_PARENTS -> COMMITTED -> OUTPUT + * + * FAILED can occur at each state transition + */ + public enum State { + COMMITTED, FAILED, OUTPUT, PREVOTED, PROPOSED, WAITING_FOR_PARENTS, WAITING_ON_ROUND; + } + + public record Signed(Digest hash, T signed) { + } } diff --git a/ethereal/src/main/java/com/salesforce/apollo/ethereal/Crown.java b/ethereal/src/main/java/com/salesforce/apollo/ethereal/Crown.java index ef061530a5..4f81723032 100644 --- a/ethereal/src/main/java/com/salesforce/apollo/ethereal/Crown.java +++ b/ethereal/src/main/java/com/salesforce/apollo/ethereal/Crown.java @@ -8,30 +8,20 @@ import static com.salesforce.apollo.cryptography.Digest.combine; -import com.salesfoce.apollo.ethereal.proto.Crown_s; -import com.salesfoce.apollo.ethereal.proto.Crown_s.Builder; +import com.salesforce.apollo.ethereal.proto.Crown_s; +import com.salesforce.apollo.ethereal.proto.Crown_s.Builder; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; /** - * Crown represents nProc units created by different processes in a condensed - * form. It contains heights of the units and a combined hash of the units - the - * controlHash. Any missing unit is represented by height -1, and + * Crown represents nProc units created by different processes in a condensed form. It contains heights of the units and + * a combined hash of the units - the controlHash. Any missing unit is represented by height -1, and * DigestAlgorithm.origin() - * - * @author hal.hildebrand * + * @author hal.hildebrand */ public record Crown(int[] heights, Digest controlHash) { - public Crown_s toCrown_s() { - Builder builder = Crown_s.newBuilder().setControlHash(controlHash.toDigeste()); - for (int i : heights) { - builder.addHeights(i); - } - return builder.build(); - } - public static Crown crownFromParents(Unit[] parents, DigestAlgorithm algo) { var nProc = parents.length; var heights = new int[nProc]; @@ -57,4 +47,12 @@ public static Crown from(Crown_s crown) { } return new Crown(heights, new Digest(crown.getControlHash())); } + + public Crown_s toCrown_s() { + Builder builder = Crown_s.newBuilder().setControlHash(controlHash.toDigeste()); + for (int i : heights) { + builder.addHeights(i); + } + return builder.build(); + } } diff --git a/ethereal/src/main/java/com/salesforce/apollo/ethereal/Dag.java b/ethereal/src/main/java/com/salesforce/apollo/ethereal/Dag.java index 27bb5cb1e4..02356644e7 100644 --- a/ethereal/src/main/java/com/salesforce/apollo/ethereal/Dag.java +++ b/ethereal/src/main/java/com/salesforce/apollo/ethereal/Dag.java @@ -27,7 +27,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.salesfoce.apollo.ethereal.proto.PreUnit_s; +import com.salesforce.apollo.ethereal.proto.PreUnit_s; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.ethereal.PreUnit.DecodedId; import com.salesforce.apollo.membership.Context; @@ -36,9 +36,97 @@ /** * @author hal.hildebrand - * */ public interface Dag { + static final Logger log = LoggerFactory.getLogger(Dag.class); + + static short threshold(int np) { + var nProcesses = (double) np; + short minimalTrusted = (short) ((nProcesses - 1.0) / 3.0); + return minimalTrusted; + } + + static boolean validate(int nProc) { + var threshold 
= threshold(nProc); + return (threshold * 3 + 1) == nProc; + } + + void addCheck(BiFunction checker); + + void afterInsert(Consumer h); + + void beforeInsert(Consumer h); + + Unit build(PreUnit base, Unit[] parents); + + Correctness check(Unit u); + + boolean contains(Digest digest); + + boolean contains(long parentID); + + /** return a slce of parents of the specified unit if control hash matches */ + Decoded decodeParents(PreUnit unit); + + int epoch(); + + Unit get(Digest digest); + + List get(List digests); + + Unit get(long id); + + void have(DigestBloomFilter biff); + + void insert(Unit u); + + boolean isQuorum(short cardinality); + + void iterateMaxUnitsPerProcess(Consumer work); + + void iterateUnits(Function consumer); + + void iterateUnitsOnLevel(int level, Function work); + + /** returns the maximal level of a unit in the dag. */ + int maxLevel(); + + DagInfo maxView(); + + List maximalUnitsPerProcess(); + + void missing(BloomFilter have, List missing); + + void missing(BloomFilter have, Map missing); + + short nProc(); + + short pid(); + + T read(Callable c); + + void read(Runnable r); + + List unitsAbove(int[] heights); + + List unitsOnLevel(int level); + + void write(Runnable r); + + public interface Decoded { + default Correctness classification() { + return Correctness.CORRECT; + } + + default boolean inError() { + return true; + } + + default Unit[] parents() { + return new Unit[0]; + } + } + public class DagImpl implements Dag { private final List> checks = new ArrayList<>(); @@ -179,8 +267,8 @@ public void have(DigestBloomFilter biff) { @Override public void insert(Unit v) { if (v.epoch() != epoch) { - throw new IllegalStateException("Invalid insert of: " + v + " into epoch: " + epoch + " on: " - + config.logLabel()); + throw new IllegalStateException( + "Invalid insert of: " + v + " into epoch: " + epoch + " on: " + config.logLabel()); } write(() -> { var unit = v.embed(this); @@ -230,11 +318,6 @@ public void iterateUnitsOnLevel(int level, Function work) { }); } - @Override - public List maximalUnitsPerProcess() { - return read(() -> Arrays.asList(maxUnits)); - } - @Override public int maxLevel() { return read(() -> { @@ -265,6 +348,11 @@ public DagInfo maxView() { }); } + @Override + public List maximalUnitsPerProcess() { + return read(() -> Arrays.asList(maxUnits)); + } + @Override public void missing(BloomFilter have, List missing) { read(() -> { @@ -361,20 +449,6 @@ private void updateMaximal(Unit u) { } } - public interface Decoded { - default Correctness classification() { - return Correctness.CORRECT; - } - - default boolean inError() { - return true; - } - - default Unit[] parents() { - return new Unit[0]; - } - } - public record DecodedR(Unit[] parents) implements Decoded { @Override public boolean inError() { @@ -382,14 +456,12 @@ public boolean inError() { } } - record DagInfo(int epoch, int[] heights) {} + record DagInfo(int epoch, int[] heights) { + } class fiberMap { - public record getResult(List result, int unknown) {} - private final List content = new ArrayList<>(); private final short width; - fiberMap(short width) { this.width = width; } @@ -433,14 +505,14 @@ public Unit get(DecodedId decoded) { } /** - * get takes a list of heights (of length nProc) and returns a slice (of length - * nProc) of slices of corresponding units. The second returned value is the - * number of unknown units (no units for that creator-height pair). + * get takes a list of heights (of length nProc) and returns a slice (of length nProc) of slices of + * corresponding units. 
The second returned value is the number of unknown units (no units for that + * creator-height pair). */ public getResult get(int[] heights) { if (heights.length != width) { - throw new IllegalStateException("Wrong number of heights passed to fiber map: " + heights.length - + " expected: " + width); + throw new IllegalStateException( + "Wrong number of heights passed to fiber map: " + heights.length + " expected: " + width); } List result = IntStream.range(0, width).mapToObj(e -> (Unit) null).collect(Collectors.toList()); var unknown = 0; @@ -497,6 +569,9 @@ private Unit[] getFiber(int height) { } return content.get(height); } + + public record getResult(List result, int unknown) { + } } record AmbiguousParents(List units) implements Decoded { @@ -522,79 +597,4 @@ public Correctness classification() { return Correctness.UNKNOWN_PARENTS; } } - - static final Logger log = LoggerFactory.getLogger(Dag.class); - - static short threshold(int np) { - var nProcesses = (double) np; - short minimalTrusted = (short) ((nProcesses - 1.0) / 3.0); - return minimalTrusted; - } - - static boolean validate(int nProc) { - var threshold = threshold(nProc); - return (threshold * 3 + 1) == nProc; - } - - void addCheck(BiFunction checker); - - void afterInsert(Consumer h); - - void beforeInsert(Consumer h); - - Unit build(PreUnit base, Unit[] parents); - - Correctness check(Unit u); - - boolean contains(Digest digest); - - boolean contains(long parentID); - - /** return a slce of parents of the specified unit if control hash matches */ - Decoded decodeParents(PreUnit unit); - - int epoch(); - - Unit get(Digest digest); - - List get(List digests); - - Unit get(long id); - - void have(DigestBloomFilter biff); - - void insert(Unit u); - - boolean isQuorum(short cardinality); - - void iterateMaxUnitsPerProcess(Consumer work); - - void iterateUnits(Function consumer); - - void iterateUnitsOnLevel(int level, Function work); - - List maximalUnitsPerProcess(); - - /** returns the maximal level of a unit in the dag. */ - int maxLevel(); - - DagInfo maxView(); - - void missing(BloomFilter have, List missing); - - void missing(BloomFilter have, Map missing); - - short nProc(); - - short pid(); - - T read(Callable c); - - void read(Runnable r); - - List unitsAbove(int[] heights); - - List unitsOnLevel(int level); - - void write(Runnable r); } diff --git a/ethereal/src/main/java/com/salesforce/apollo/ethereal/EpochProofBuilder.java b/ethereal/src/main/java/com/salesforce/apollo/ethereal/EpochProofBuilder.java index f6b965edc9..47d4d82c1b 100644 --- a/ethereal/src/main/java/com/salesforce/apollo/ethereal/EpochProofBuilder.java +++ b/ethereal/src/main/java/com/salesforce/apollo/ethereal/EpochProofBuilder.java @@ -14,25 +14,80 @@ import com.google.protobuf.ByteString; import com.google.protobuf.InvalidProtocolBufferException; -import com.salesfoce.apollo.ethereal.proto.EpochProof; -import com.salesfoce.apollo.ethereal.proto.EpochProof.Builder; -import com.salesfoce.apollo.ethereal.proto.Proof; +import com.salesforce.apollo.ethereal.proto.EpochProof; +import com.salesforce.apollo.ethereal.proto.EpochProof.Builder; +import com.salesforce.apollo.ethereal.proto.Proof; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.JohnHancock; /** - * the epoch proof is a message required to verify if the epoch has finished. It - * consists of id and hash of the last timing unit of the epoch. This message is - * signed with a threshold signature. 
- * - * @author hal.hildebrand + * the epoch proof is a message required to verify if the epoch has finished. It consists of id and hash of the last + * timing unit of the epoch. This message is signed with a threshold signature. * + * @author hal.hildebrand */ public interface EpochProofBuilder { + static final Logger log = LoggerFactory.getLogger(EpochProofBuilder.class); + + /** + * decodeShare reads signature share and the signed message from Data contained in some unit. + */ + static DecodedShare decodeShare(ByteString data) { + try { + EpochProof proof = EpochProof.parseFrom(data); + return new DecodedShare(Share.from(proof), proof); + } catch (InvalidProtocolBufferException e) { + return null; + } + } + + /** + * EpochProofBuilder checks if the given preunit is a proof that a new epoch started. + */ + static boolean epochProof(PreUnit pu, WeakThresholdKey wtk) { + if (!pu.dealing()) { + return false; + } + if (pu.epoch() == 0) { + return true; + } + EpochProof decoded; + try { + decoded = EpochProof.parseFrom(pu.data()); + } catch (InvalidProtocolBufferException e) { + return false; + } + int epoch = PreUnit.decode(decoded.getMsg().getEncodedId()).epoch(); + if (epoch + 1 != pu.epoch()) { + return false; + } + return wtk == null ? true : wtk.verifySignature(decoded); + } + + private static Proof encodeProof(Unit lastTimingUnit) { + return Proof.newBuilder().setEncodedId(lastTimingUnit.id()).setHash(lastTimingUnit.hash().toDigeste()).build(); + } + + /** + * converts signature share and the signed message into Data that can be put into unit. + */ + private static ByteString encodeShare(Share share, Proof proof) { + Builder builder = EpochProof.newBuilder(); + if (share != null) { + builder.setOwner(share.owner).setSignature(share.signature().toSig()); + } + return builder.setMsg(proof).build().toByteString(); + } + + ByteString buildShare(Unit timingUnit); + + ByteString tryBuilding(Unit unit); + + boolean verify(Unit unit); + /** * @author hal.hildebrand - * */ public record Share(short owner, JohnHancock signature) { public static Share from(EpochProof proof) { @@ -46,9 +101,8 @@ public static Share from(EpochProof proof) { record sharesDB(Config conf, ConcurrentMap> data) { /** - * Add puts the share that signs msg to the storage. If there are enough shares - * (for that msg), they are combined and the resulting signature is returned. - * Otherwise, returns nil. + * Add puts the share that signs msg to the storage. If there are enough shares (for that msg), they are + * combined and the resulting signature is returned. Otherwise, returns nil. */ JohnHancock add(DecodedShare decoded) { Digest key = new Digest(decoded.proof.getMsg().getHash()); @@ -80,10 +134,21 @@ JohnHancock add(DecodedShare decoded) { public record epochProofImpl(Config conf, int epoch, sharesDB shares) implements EpochProofBuilder { + @Override + public ByteString buildShare(Unit lastTimingUnit) { + var proof = encodeProof(lastTimingUnit); + Share share = conf.WTKey().createShare(proof, conf.pid()); + log.debug("WTK share built on: {} from: {} proof: {} share: {} on: {}", lastTimingUnit.creator(), + lastTimingUnit, proof, share, conf.logLabel()); + if (share != null) { + return encodeShare(share, proof); + } + return ByteString.EMPTY; + } + /** - * extracts threshold signature shares from finishing units. If there are enough - * shares to combine, it produces the signature and converts it to Any. - * Otherwise, null is returned. + * extracts threshold signature shares from finishing units. 
If there are enough shares to combine, it produces + * the signature and converts it to Any. Otherwise, null is returned. */ @Override public ByteString tryBuilding(Unit u) { @@ -109,10 +174,6 @@ public ByteString tryBuilding(Unit u) { return null; } - private ByteString encodeSignature(JohnHancock sig, EpochProof proof) { - return proof.toByteString(); - } - @Override public boolean verify(Unit unit) { if (epoch + 1 != unit.epoch()) { @@ -121,81 +182,13 @@ public boolean verify(Unit unit) { return epochProof(unit, conf.WTKey()); } - @Override - public ByteString buildShare(Unit lastTimingUnit) { - var proof = encodeProof(lastTimingUnit); - Share share = conf.WTKey().createShare(proof, conf.pid()); - log.debug("WTK share built on: {} from: {} proof: {} share: {} on: {}", lastTimingUnit.creator(), - lastTimingUnit, proof, share, conf.logLabel()); - if (share != null) { - return encodeShare(share, proof); - } - return ByteString.EMPTY; - } - - } - - record DecodedShare(Share share, EpochProof proof) {} - - static final Logger log = LoggerFactory.getLogger(EpochProofBuilder.class); - - /** - * decodeShare reads signature share and the signed message from Data contained - * in some unit. - */ - static DecodedShare decodeShare(ByteString data) { - try { - EpochProof proof = EpochProof.parseFrom(data); - return new DecodedShare(Share.from(proof), proof); - } catch (InvalidProtocolBufferException e) { - return null; - } - } - - /** - * EpochProofBuilder checks if the given preunit is a proof that a new epoch - * started. - */ - static boolean epochProof(PreUnit pu, WeakThresholdKey wtk) { - if (!pu.dealing()) { - return false; - } - if (pu.epoch() == 0) { - return true; - } - EpochProof decoded; - try { - decoded = EpochProof.parseFrom(pu.data()); - } catch (InvalidProtocolBufferException e) { - return false; - } - int epoch = PreUnit.decode(decoded.getMsg().getEncodedId()).epoch(); - if (epoch + 1 != pu.epoch()) { - return false; + private ByteString encodeSignature(JohnHancock sig, EpochProof proof) { + return proof.toByteString(); } - return wtk == null ? true : wtk.verifySignature(decoded); - } - private static Proof encodeProof(Unit lastTimingUnit) { - return Proof.newBuilder().setEncodedId(lastTimingUnit.id()).setHash(lastTimingUnit.hash().toDigeste()).build(); } - /** - * converts signature share and the signed message into Data that can be put - * into unit. 
- */ - private static ByteString encodeShare(Share share, Proof proof) { - Builder builder = EpochProof.newBuilder(); - if (share != null) { - builder.setOwner(share.owner).setSignature(share.signature().toSig()); - } - return builder.setMsg(proof).build().toByteString(); + record DecodedShare(Share share, EpochProof proof) { } - ByteString buildShare(Unit timingUnit); - - ByteString tryBuilding(Unit unit); - - boolean verify(Unit unit); - } diff --git a/ethereal/src/main/java/com/salesforce/apollo/ethereal/Ethereal.java b/ethereal/src/main/java/com/salesforce/apollo/ethereal/Ethereal.java index 2f474d65b3..a9d467a152 100644 --- a/ethereal/src/main/java/com/salesforce/apollo/ethereal/Ethereal.java +++ b/ethereal/src/main/java/com/salesforce/apollo/ethereal/Ethereal.java @@ -7,9 +7,9 @@ package com.salesforce.apollo.ethereal; import com.google.protobuf.ByteString; -import com.salesfoce.apollo.ethereal.proto.Gossip; -import com.salesfoce.apollo.ethereal.proto.Missing; -import com.salesfoce.apollo.ethereal.proto.Update; +import com.salesforce.apollo.ethereal.proto.Gossip; +import com.salesforce.apollo.ethereal.proto.Missing; +import com.salesforce.apollo.ethereal.proto.Update; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.ethereal.Dag.DagImpl; import com.salesforce.apollo.ethereal.EpochProofBuilder.epochProofImpl; @@ -32,22 +32,24 @@ */ public class Ethereal { - private static final Logger log = LoggerFactory.getLogger(Ethereal.class); - private final Config config; - private final ThreadPoolExecutor consumer; - private final Creator creator; - private final AtomicInteger currentEpoch = new AtomicInteger(-1); - private final Map epochs = new ConcurrentHashMap<>(); - private final Set failed = new ConcurrentSkipListSet<>(); - private final Queue lastTiming; - private final int maxSerializedSize; - private final Consumer newEpochAction; - private final AtomicBoolean started = new AtomicBoolean(); - private final Consumer> toPreblock; + private static final Logger log = LoggerFactory.getLogger(Ethereal.class); + private final Config config; + private final ThreadPoolExecutor consumer; + private final Creator creator; + private final AtomicInteger currentEpoch = new AtomicInteger(-1); + private final Map epochs = new ConcurrentHashMap<>(); + private final Set failed = new ConcurrentSkipListSet<>(); + private final Queue lastTiming; + private final int maxSerializedSize; + private final Consumer newEpochAction; + private final AtomicBoolean started = new AtomicBoolean(); + private final Consumer> toPreblock; + public Ethereal(Config config, int maxSerializedSize, DataSource ds, BiConsumer, Boolean> blocker, Consumer newEpochAction, String label) { this(config, maxSerializedSize, ds, blocker(blocker, config), newEpochAction, label); } + public Ethereal(Config conf, int maxSerializedSize, DataSource ds, Consumer> toPreblock, Consumer newEpochAction, String label) { if (!Dag.validate(conf.nProc())) { diff --git a/ethereal/src/main/java/com/salesforce/apollo/ethereal/PreUnit.java b/ethereal/src/main/java/com/salesforce/apollo/ethereal/PreUnit.java index 474aa67bb5..b08f8eb9e2 100644 --- a/ethereal/src/main/java/com/salesforce/apollo/ethereal/PreUnit.java +++ b/ethereal/src/main/java/com/salesforce/apollo/ethereal/PreUnit.java @@ -17,8 +17,8 @@ import java.util.Map; import com.google.protobuf.ByteString; -import com.salesfoce.apollo.ethereal.proto.PreUnit_s; -import com.salesfoce.apollo.ethereal.proto.PreUnit_s.Builder; +import 
com.salesforce.apollo.ethereal.proto.PreUnit_s; +import com.salesforce.apollo.ethereal.proto.PreUnit_s.Builder; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.cryptography.JohnHancock; @@ -28,203 +28,9 @@ /** * @author hal.hildebrand - * */ public interface PreUnit { - record freeUnit(PreUnit p, Unit[] parents, int level, Map floor) implements Unit { - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj instanceof Unit pu) { - return hash().equals(pu.hash()); - } - return false; - } - - @Override - public int hashCode() { - return hash().hashCode(); - } - - @Override - public short creator() { - return p.creator(); - } - - @Override - public ByteString data() { - return p.data(); - } - - @Override - public int epoch() { - return p.epoch(); - } - - @Override - public Digest hash() { - return p.hash(); - } - - @Override - public int height() { - return p.height(); - } - - @Override - public Crown view() { - return p.view(); - } - - @Override - public Unit from(Unit[] parents, double bias) { - freeUnit u = new freeUnit(p, parents, Unit.levelFromParents(parents, bias), new HashMap<>()); - assert u.height() == u.level; - u.computeFloor(); - return u; - } - - @Override - public boolean aboveWithinProc(Unit v) { - if (creator() != v.creator()) { - return false; - } - Unit w; - for (w = this; w != null && w.height() > v.height(); w = w.predecessor()) - ; - if (w == null) { - return false; - } - return w.hash().equals(v.hash()); - } - - @Override - public Unit[] floor(short pid) { - var fl = floor.get(pid); - if (fl != null) { - return fl; - } - if (parents[pid] == null) { - return new Unit[0]; - } - return Arrays.copyOfRange(parents, pid, pid + 1); - } - - private void computeFloor() { - if (dealing()) { - return; - } - for (short pid = 0; pid < parents.length; pid++) { - var maximal = Unit.maximalByPid(parents, pid); - if (maximal.length > 1 || maximal.length == 1 && !maximal[0].equals(parents[pid])) { - floor.put(pid, maximal); - } - } - } - - @Override - public String toString() { - return "fu[" + shortString() + "]"; - } - - @Override - public String shortString() { - return p.shortString(); - } - - @Override - public JohnHancock signature() { - return p.signature(); - } - - @Override - public PreUnit toPreUnit() { - return p.toPreUnit(); - } - - @Override - public PreUnit_s toPreUnit_s() { - return p.toPreUnit_s(); - } - - @Override - public boolean verify(Verifier[] verifiers) { - return p.verify(verifiers); - } - } - - public record preUnit(short creator, int epoch, int height, Digest hash, Crown crown, ByteString data, - JohnHancock signature, byte[] salt) - implements PreUnit { - - @Override - public int hashCode() { - return hash.hashCode(); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj instanceof Unit u) { - return hash.equals(u.hash()); - } - return false; - } - - @Override - public PreUnit_s toPreUnit_s() { - Builder builder = PreUnit_s.newBuilder() - .setSignature(signature.toSig()) - .setId(id()) - .setCrown(crown.toCrown_s()); - if (data != null) { - builder.setData(data); - } - return builder.build(); - } - - @Override - public Crown view() { - return crown; - } - - @Override - public String toString() { - return "pu[" + shortString() + "]"; - } - - @Override - public String shortString() { - return creator() + ":" + height() + ":" + epoch(); - } - - @Override - public PreUnit 
toPreUnit() { - return this; - } - - @Override - public boolean verify(Verifier[] verifiers) { - if (creator >= verifiers.length) { - return false; - } - return verifiers[creator].verify(signature, PreUnit.forSigning(creator, crown, data, salt)); - } - } - - public record DecodedId(int height, short creator, int epoch) { - @Override - public String toString() { - return "[" + creator + ":" + height + ":" + epoch + "]"; - } - } - public static PreUnit from(PreUnit_s pu, DigestAlgorithm algo) { var decoded = decode(pu.getId()); @@ -299,9 +105,9 @@ static Unit newFreeUnit(short creator, int epoch, Unit[] parents, int level, Byt var salt = new byte[algo.digestLength()]; Entropy.nextSecureBytes(salt); var signature = sign(signer, id, crown, data, salt); - var u = new freeUnit(new preUnit(creator, epoch, height, signature.toDigest(algo), crown, data, signature, - salt), - parents, level, new HashMap<>()); + var u = new freeUnit( + new preUnit(creator, epoch, height, signature.toDigest(algo), crown, data, signature, salt), parents, level, + new HashMap<>()); u.computeFloor(); return u; @@ -354,4 +160,196 @@ default int round(Config conf) { boolean verify(Verifier[] verifiers); Crown view(); + + record freeUnit(PreUnit p, Unit[] parents, int level, Map floor) implements Unit { + + @Override + public boolean aboveWithinProc(Unit v) { + if (creator() != v.creator()) { + return false; + } + Unit w; + for (w = this; w != null && w.height() > v.height(); w = w.predecessor()) + ; + if (w == null) { + return false; + } + return w.hash().equals(v.hash()); + } + + @Override + public short creator() { + return p.creator(); + } + + @Override + public ByteString data() { + return p.data(); + } + + @Override + public int epoch() { + return p.epoch(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj instanceof Unit pu) { + return hash().equals(pu.hash()); + } + return false; + } + + @Override + public Unit[] floor(short pid) { + var fl = floor.get(pid); + if (fl != null) { + return fl; + } + if (parents[pid] == null) { + return new Unit[0]; + } + return Arrays.copyOfRange(parents, pid, pid + 1); + } + + @Override + public Unit from(Unit[] parents, double bias) { + freeUnit u = new freeUnit(p, parents, Unit.levelFromParents(parents, bias), new HashMap<>()); + assert u.height() == u.level; + u.computeFloor(); + return u; + } + + @Override + public Digest hash() { + return p.hash(); + } + + @Override + public int hashCode() { + return hash().hashCode(); + } + + @Override + public int height() { + return p.height(); + } + + @Override + public String shortString() { + return p.shortString(); + } + + @Override + public JohnHancock signature() { + return p.signature(); + } + + @Override + public PreUnit toPreUnit() { + return p.toPreUnit(); + } + + @Override + public PreUnit_s toPreUnit_s() { + return p.toPreUnit_s(); + } + + @Override + public String toString() { + return "fu[" + shortString() + "]"; + } + + @Override + public boolean verify(Verifier[] verifiers) { + return p.verify(verifiers); + } + + @Override + public Crown view() { + return p.view(); + } + + private void computeFloor() { + if (dealing()) { + return; + } + for (short pid = 0; pid < parents.length; pid++) { + var maximal = Unit.maximalByPid(parents, pid); + if (maximal.length > 1 || maximal.length == 1 && !maximal[0].equals(parents[pid])) { + floor.put(pid, maximal); + } + } + } + } + + public record preUnit(short creator, int epoch, int height, Digest hash, Crown crown, ByteString data, + 
JohnHancock signature, byte[] salt) implements PreUnit { + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj instanceof Unit u) { + return hash.equals(u.hash()); + } + return false; + } + + @Override + public int hashCode() { + return hash.hashCode(); + } + + @Override + public String shortString() { + return creator() + ":" + height() + ":" + epoch(); + } + + @Override + public PreUnit toPreUnit() { + return this; + } + + @Override + public PreUnit_s toPreUnit_s() { + Builder builder = PreUnit_s.newBuilder() + .setSignature(signature.toSig()) + .setId(id()) + .setCrown(crown.toCrown_s()); + if (data != null) { + builder.setData(data); + } + return builder.build(); + } + + @Override + public String toString() { + return "pu[" + shortString() + "]"; + } + + @Override + public boolean verify(Verifier[] verifiers) { + if (creator >= verifiers.length) { + return false; + } + return verifiers[creator].verify(signature, PreUnit.forSigning(creator, crown, data, salt)); + } + + @Override + public Crown view() { + return crown; + } + } + + public record DecodedId(int height, short creator, int epoch) { + @Override + public String toString() { + return "[" + creator + ":" + height + ":" + epoch + "]"; + } + } } diff --git a/ethereal/src/main/java/com/salesforce/apollo/ethereal/Processor.java b/ethereal/src/main/java/com/salesforce/apollo/ethereal/Processor.java index 8177872b63..e663c30aaf 100644 --- a/ethereal/src/main/java/com/salesforce/apollo/ethereal/Processor.java +++ b/ethereal/src/main/java/com/salesforce/apollo/ethereal/Processor.java @@ -7,20 +7,18 @@ package com.salesforce.apollo.ethereal; -import com.salesfoce.apollo.ethereal.proto.Gossip; -import com.salesfoce.apollo.ethereal.proto.Update; +import com.salesforce.apollo.ethereal.proto.Gossip; +import com.salesforce.apollo.ethereal.proto.Update; import com.salesforce.apollo.cryptography.Digest; /** - * * @author hal.hildebrand - * */ public interface Processor { /** * First phase request. Answer the gossip for the current state of the receiver - * + * * @param context - the digest id of the context for routing * @param ring - the ring we're gossiping on * @return the Gossip @@ -28,29 +26,25 @@ public interface Processor { Gossip gossip(Digest context, int ring); /** - * First phase reply. Answer the Update from the receiver's state, based on the - * suppled Have - * + * First phase reply. Answer the Update from the receiver's state, based on the suppled Have + * * @param gossip - the state contained by the partner - * @return the Update based on the current state of the receiver and the have - * state of the partner + * @return the Update based on the current state of the receiver and the have state of the partner */ Update gossip(Gossip gossip); /** - * Second phase, update the receiver state from the supplied update. Return an - * update based on the current state and the haves of the supplied update - * + * Second phase, update the receiver state from the supplied update. 
Return an update based on the current state and + * the haves of the supplied update + * * @param update - the Update from the partner - * @return the Update from the current state of the receiver and the have state - * of the supplied update + * @return the Update from the current state of the receiver and the have state of the supplied update */ Update update(Update update); /** - * Final phase; update the commit, prevote and unit state from the supplied - * update - * + * Final phase; update the commit, prevote and unit state from the supplied update + * * @param update - the Update from our partner */ void updateFrom(Update update); diff --git a/ethereal/src/main/java/com/salesforce/apollo/ethereal/Unit.java b/ethereal/src/main/java/com/salesforce/apollo/ethereal/Unit.java index 989ea8f9ce..699578936f 100644 --- a/ethereal/src/main/java/com/salesforce/apollo/ethereal/Unit.java +++ b/ethereal/src/main/java/com/salesforce/apollo/ethereal/Unit.java @@ -11,7 +11,7 @@ import java.util.List; import com.google.protobuf.ByteString; -import com.salesfoce.apollo.ethereal.proto.PreUnit_s; +import com.salesforce.apollo.ethereal.proto.PreUnit_s; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.JohnHancock; import com.salesforce.apollo.cryptography.Verifier; @@ -19,127 +19,9 @@ /** * @author hal.hildebrand - * */ public interface Unit extends PreUnit { - record unitInDag(Unit unit, int forkingHeight) implements Unit { - - @Override - public int hashCode() { - return unit.hashCode(); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj instanceof Unit uid) { - return hash().equals(uid.hash()); - } - return false; - } - - @Override - public short creator() { - return unit.creator(); - } - - @Override - public ByteString data() { - return unit.data(); - } - - @Override - public int epoch() { - return unit.epoch(); - } - - @Override - public Digest hash() { - return unit.hash(); - } - - @Override - public int height() { - return unit.height(); - } - - @Override - public Crown view() { - return unit.view(); - } - - @Override - public boolean aboveWithinProc(Unit v) { - if (unit.height() < v.height() || unit.creator() != v.creator()) { - return false; - } - if (v instanceof unitInDag uid) { - if (v.height() < commonForkHeight(uid)) { - return true; - } - } - // Either we have a fork or a different type of unit, either way no optimization - // is possible. 
- return unit.aboveWithinProc(v); - - } - - int commonForkHeight(unitInDag v) { - if (forkingHeight < v.forkingHeight) { - return forkingHeight; - } - return v.forkingHeight; - } - - @Override - public Unit[] floor(short slice) { - return unit.floor(slice); - } - - @Override - public int level() { - return unit.level(); - } - - @Override - public Unit[] parents() { - return unit.parents(); - } - - @Override - public String toString() { - return "uid[" + shortString() + "]"; - } - - @Override - public String shortString() { - return creator() + ":" + level() + ":" + epoch(); - } - - @Override - public PreUnit toPreUnit() { - return unit.toPreUnit(); - } - - @Override - public PreUnit_s toPreUnit_s() { - return unit.toPreUnit_s(); - } - - @Override - public JohnHancock signature() { - return unit.signature(); - } - - @Override - public boolean verify(Verifier[] verifiers) { - return unit.verify(verifiers); - } - } - static int levelFromParents(Unit[] parents, double bias) { var nProc = (short) parents.length; var level = 0; @@ -163,8 +45,7 @@ static int levelFromParents(Unit[] parents, double bias) { } /** - * Computes all maximal units produced by a pid present in parents and their - * floors + * Computes all maximal units produced by a pid present in parents and their floors */ static Unit[] maximalByPid(Unit[] parents, short pid) { if (parents[pid] == null) { @@ -262,9 +143,8 @@ default boolean belowAny(List units) { } /** - * this implementation works as long as there is no race for writing/reading to - * dag.maxUnits, i.e. as long as units created by one process are added - * atomically + * this implementation works as long as there is no race for writing/reading to dag.maxUnits, i.e. as long as units + * created by one process are added atomically */ default int computeForkingHeight(Dag dag) { if (dealing()) { @@ -311,4 +191,121 @@ default Unit embed(Dag dag) { default Unit predecessor() { return parents()[creator()]; } + + record unitInDag(Unit unit, int forkingHeight) implements Unit { + + @Override + public boolean aboveWithinProc(Unit v) { + if (unit.height() < v.height() || unit.creator() != v.creator()) { + return false; + } + if (v instanceof unitInDag uid) { + if (v.height() < commonForkHeight(uid)) { + return true; + } + } + // Either we have a fork or a different type of unit, either way no optimization + // is possible. 
+ return unit.aboveWithinProc(v); + + } + + @Override + public short creator() { + return unit.creator(); + } + + @Override + public ByteString data() { + return unit.data(); + } + + @Override + public int epoch() { + return unit.epoch(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj instanceof Unit uid) { + return hash().equals(uid.hash()); + } + return false; + } + + @Override + public Unit[] floor(short slice) { + return unit.floor(slice); + } + + @Override + public Digest hash() { + return unit.hash(); + } + + @Override + public int hashCode() { + return unit.hashCode(); + } + + @Override + public int height() { + return unit.height(); + } + + @Override + public int level() { + return unit.level(); + } + + @Override + public Unit[] parents() { + return unit.parents(); + } + + @Override + public String shortString() { + return creator() + ":" + level() + ":" + epoch(); + } + + @Override + public JohnHancock signature() { + return unit.signature(); + } + + @Override + public PreUnit toPreUnit() { + return unit.toPreUnit(); + } + + @Override + public PreUnit_s toPreUnit_s() { + return unit.toPreUnit_s(); + } + + @Override + public String toString() { + return "uid[" + shortString() + "]"; + } + + @Override + public boolean verify(Verifier[] verifiers) { + return unit.verify(verifiers); + } + + @Override + public Crown view() { + return unit.view(); + } + + int commonForkHeight(unitInDag v) { + if (forkingHeight < v.forkingHeight) { + return forkingHeight; + } + return v.forkingHeight; + } + } } diff --git a/ethereal/src/main/java/com/salesforce/apollo/ethereal/Waiting.java b/ethereal/src/main/java/com/salesforce/apollo/ethereal/Waiting.java index b2a17436b5..653ca1ddcb 100644 --- a/ethereal/src/main/java/com/salesforce/apollo/ethereal/Waiting.java +++ b/ethereal/src/main/java/com/salesforce/apollo/ethereal/Waiting.java @@ -10,24 +10,22 @@ import java.util.List; import java.util.concurrent.atomic.AtomicReference; -import com.salesfoce.apollo.ethereal.proto.PreUnit_s; +import com.salesforce.apollo.ethereal.proto.PreUnit_s; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.ethereal.Adder.State; /** - * * @author hal.hildebrand - * */ public class Waiting implements Comparable { - private final List children = new ArrayList<>(); - private volatile Unit decoded; - private volatile int missingParents = 0; - private final PreUnit pu; - private final PreUnit_s serialized; - private AtomicReference state = new AtomicReference<>(State.PROPOSED); - private volatile int waitingParents = 0; + private final List children = new ArrayList<>(); + private final PreUnit pu; + private final PreUnit_s serialized; + private volatile Unit decoded; + private volatile int missingParents = 0; + private AtomicReference state = new AtomicReference<>(State.PROPOSED); + private volatile int waitingParents = 0; public Waiting(PreUnit pu) { this(pu, pu.toPreUnit_s()); @@ -74,15 +72,15 @@ public void decMissing() { missingParents = m - 1; } - public Unit decoded() { - return decoded; - } - public void decWaiting() { final var w = waitingParents; waitingParents = w - 1; } + public Unit decoded() { + return decoded; + } + public int epoch() { return pu.epoch(); } diff --git a/ethereal/src/main/java/com/salesforce/apollo/ethereal/WeakThresholdKey.java b/ethereal/src/main/java/com/salesforce/apollo/ethereal/WeakThresholdKey.java index 76d6feead7..5e7d9b0ced 100644 --- 
a/ethereal/src/main/java/com/salesforce/apollo/ethereal/WeakThresholdKey.java +++ b/ethereal/src/main/java/com/salesforce/apollo/ethereal/WeakThresholdKey.java @@ -6,8 +6,8 @@ */ package com.salesforce.apollo.ethereal; -import com.salesfoce.apollo.ethereal.proto.EpochProof; -import com.salesfoce.apollo.ethereal.proto.Proof; +import com.salesforce.apollo.ethereal.proto.EpochProof; +import com.salesforce.apollo.ethereal.proto.Proof; import com.salesforce.apollo.cryptography.JohnHancock; import com.salesforce.apollo.cryptography.SignatureAlgorithm; import com.salesforce.apollo.ethereal.EpochProofBuilder.DecodedShare; diff --git a/ethereal/src/main/java/com/salesforce/apollo/ethereal/memberships/ChRbcGossip.java b/ethereal/src/main/java/com/salesforce/apollo/ethereal/memberships/ChRbcGossip.java index 3c1183dbec..effcb308ff 100644 --- a/ethereal/src/main/java/com/salesforce/apollo/ethereal/memberships/ChRbcGossip.java +++ b/ethereal/src/main/java/com/salesforce/apollo/ethereal/memberships/ChRbcGossip.java @@ -7,9 +7,6 @@ package com.salesforce.apollo.ethereal.memberships; import com.codahale.metrics.Timer; -import com.salesfoce.apollo.ethereal.proto.ContextUpdate; -import com.salesfoce.apollo.ethereal.proto.Gossip; -import com.salesfoce.apollo.ethereal.proto.Update; import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.archipelago.RouterImpl.CommonCommunications; import com.salesforce.apollo.cryptography.Digest; @@ -18,11 +15,15 @@ import com.salesforce.apollo.ethereal.memberships.comm.Gossiper; import com.salesforce.apollo.ethereal.memberships.comm.GossiperServer; import com.salesforce.apollo.ethereal.memberships.comm.GossiperService; +import com.salesforce.apollo.ethereal.proto.ContextUpdate; +import com.salesforce.apollo.ethereal.proto.Gossip; +import com.salesforce.apollo.ethereal.proto.Update; import com.salesforce.apollo.membership.Context; import com.salesforce.apollo.membership.Member; import com.salesforce.apollo.membership.SigningMember; import com.salesforce.apollo.ring.RingCommunications; import com.salesforce.apollo.utils.Entropy; +import com.salesforce.apollo.utils.Utils; import io.grpc.StatusRuntimeException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -84,13 +85,13 @@ public void start(Duration duration) { log.trace("Starting GossipService[{}] on: {}", context.getId(), member.getId()); comm.register(context.getId(), new Terminal()); var scheduler = Executors.newScheduledThreadPool(1, Thread.ofVirtual().factory()); - scheduler.schedule(() -> { + scheduler.schedule(() -> Thread.ofVirtual().start(Utils.wrapped(() -> { try { oneRound(duration, scheduler); } catch (Throwable e) { log.error("Error in gossip on: {}", member.getId(), e); } - }, initialDelay.toMillis(), TimeUnit.MILLISECONDS); + }, log)), initialDelay.toMillis(), TimeUnit.MILLISECONDS); } /** @@ -175,8 +176,9 @@ private void handle(Optional result, RingCommunications.Destination oneRound(duration, scheduler), duration.toMillis(), - TimeUnit.MILLISECONDS); + scheduled = scheduler.schedule( + () -> Thread.ofVirtual().start(Utils.wrapped(() -> oneRound(duration, scheduler), log)), + duration.toMillis(), TimeUnit.MILLISECONDS); } } } diff --git a/ethereal/src/main/java/com/salesforce/apollo/ethereal/memberships/comm/Gossiper.java b/ethereal/src/main/java/com/salesforce/apollo/ethereal/memberships/comm/Gossiper.java index 06233506f0..2b93266ce2 100644 --- a/ethereal/src/main/java/com/salesforce/apollo/ethereal/memberships/comm/Gossiper.java +++ 
b/ethereal/src/main/java/com/salesforce/apollo/ethereal/memberships/comm/Gossiper.java @@ -6,9 +6,9 @@ */ package com.salesforce.apollo.ethereal.memberships.comm; -import com.salesfoce.apollo.ethereal.proto.ContextUpdate; -import com.salesfoce.apollo.ethereal.proto.Gossip; -import com.salesfoce.apollo.ethereal.proto.Update; +import com.salesforce.apollo.ethereal.proto.ContextUpdate; +import com.salesforce.apollo.ethereal.proto.Gossip; +import com.salesforce.apollo.ethereal.proto.Update; import com.salesforce.apollo.archipelago.Link; import com.salesforce.apollo.membership.Member; diff --git a/ethereal/src/main/java/com/salesforce/apollo/ethereal/memberships/comm/GossiperClient.java b/ethereal/src/main/java/com/salesforce/apollo/ethereal/memberships/comm/GossiperClient.java index 5aa4227174..200f0783c1 100644 --- a/ethereal/src/main/java/com/salesforce/apollo/ethereal/memberships/comm/GossiperClient.java +++ b/ethereal/src/main/java/com/salesforce/apollo/ethereal/memberships/comm/GossiperClient.java @@ -7,10 +7,10 @@ package com.salesforce.apollo.ethereal.memberships.comm; import com.codahale.metrics.Timer.Context; -import com.salesfoce.apollo.ethereal.proto.ContextUpdate; -import com.salesfoce.apollo.ethereal.proto.Gossip; -import com.salesfoce.apollo.ethereal.proto.GossiperGrpc; -import com.salesfoce.apollo.ethereal.proto.Update; +import com.salesforce.apollo.ethereal.proto.ContextUpdate; +import com.salesforce.apollo.ethereal.proto.Gossip; +import com.salesforce.apollo.ethereal.proto.GossiperGrpc; +import com.salesforce.apollo.ethereal.proto.Update; import com.salesforce.apollo.archipelago.ManagedServerChannel; import com.salesforce.apollo.archipelago.ServerConnectionCache.CreateClientCommunications; import com.salesforce.apollo.membership.Member; @@ -21,9 +21,9 @@ */ public class GossiperClient implements Gossiper { - private final ManagedServerChannel channel; + private final ManagedServerChannel channel; private final GossiperGrpc.GossiperBlockingStub client; - private final EtherealMetrics metrics; + private final EtherealMetrics metrics; public GossiperClient(ManagedServerChannel channel, EtherealMetrics metrics) { this.channel = channel; diff --git a/ethereal/src/main/java/com/salesforce/apollo/ethereal/memberships/comm/GossiperServer.java b/ethereal/src/main/java/com/salesforce/apollo/ethereal/memberships/comm/GossiperServer.java index adcaf6125f..5532d5c4ea 100644 --- a/ethereal/src/main/java/com/salesforce/apollo/ethereal/memberships/comm/GossiperServer.java +++ b/ethereal/src/main/java/com/salesforce/apollo/ethereal/memberships/comm/GossiperServer.java @@ -8,10 +8,10 @@ import com.codahale.metrics.Timer.Context; import com.google.protobuf.Empty; -import com.salesfoce.apollo.ethereal.proto.ContextUpdate; -import com.salesfoce.apollo.ethereal.proto.Gossip; -import com.salesfoce.apollo.ethereal.proto.GossiperGrpc.GossiperImplBase; -import com.salesfoce.apollo.ethereal.proto.Update; +import com.salesforce.apollo.ethereal.proto.ContextUpdate; +import com.salesforce.apollo.ethereal.proto.Gossip; +import com.salesforce.apollo.ethereal.proto.GossiperGrpc.GossiperImplBase; +import com.salesforce.apollo.ethereal.proto.Update; import com.salesforce.apollo.archipelago.RoutableService; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.protocols.ClientIdentity; @@ -20,12 +20,11 @@ /** * @author hal.hildebrand - * */ public class GossiperServer extends GossiperImplBase { - private ClientIdentity identity; private final EtherealMetrics metrics; private final 
RoutableService routing; + private ClientIdentity identity; public GossiperServer(ClientIdentity identity, EtherealMetrics metrics, RoutableService r) { this.metrics = metrics; diff --git a/ethereal/src/main/java/com/salesforce/apollo/ethereal/memberships/comm/GossiperService.java b/ethereal/src/main/java/com/salesforce/apollo/ethereal/memberships/comm/GossiperService.java index f70aa82485..d95ac56aca 100644 --- a/ethereal/src/main/java/com/salesforce/apollo/ethereal/memberships/comm/GossiperService.java +++ b/ethereal/src/main/java/com/salesforce/apollo/ethereal/memberships/comm/GossiperService.java @@ -6,14 +6,13 @@ */ package com.salesforce.apollo.ethereal.memberships.comm; -import com.salesfoce.apollo.ethereal.proto.ContextUpdate; -import com.salesfoce.apollo.ethereal.proto.Gossip; -import com.salesfoce.apollo.ethereal.proto.Update; +import com.salesforce.apollo.ethereal.proto.ContextUpdate; +import com.salesforce.apollo.ethereal.proto.Gossip; +import com.salesforce.apollo.ethereal.proto.Update; import com.salesforce.apollo.cryptography.Digest; /** * @author hal.hildebrand - * */ public interface GossiperService { diff --git a/ethereal/src/test/java/com/salesforce/apollo/ethereal/EtherealTest.java b/ethereal/src/test/java/com/salesforce/apollo/ethereal/EtherealTest.java index 61e8d3b279..c8ca8482dc 100644 --- a/ethereal/src/test/java/com/salesforce/apollo/ethereal/EtherealTest.java +++ b/ethereal/src/test/java/com/salesforce/apollo/ethereal/EtherealTest.java @@ -10,7 +10,7 @@ import com.codahale.metrics.MetricRegistry; import com.google.protobuf.ByteString; import com.google.protobuf.InvalidProtocolBufferException; -import com.salesfoce.apollo.messaging.proto.ByteMessage; +import com.salesforce.apollo.messaging.proto.ByteMessage; import com.salesforce.apollo.archipelago.LocalServer; import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.archipelago.ServerConnectionCache; diff --git a/fireflies/pom.xml b/fireflies/pom.xml index 47c23c04c4..702113c390 100644 --- a/fireflies/pom.xml +++ b/fireflies/pom.xml @@ -1,4 +1,5 @@ - + 4.0.0 com.salesforce.apollo @@ -7,7 +8,8 @@ fireflies Fireflies - Byzantine fault tolerant, virtually synchronous membership service and secure communications ovelay + Byzantine fault-tolerant, virtually synchronous membership service and secure communications ovelay + com.salesforce.apollo @@ -44,4 +46,4 @@ test - \ No newline at end of file + diff --git a/fireflies/src/main/java/com/salesforce/apollo/fireflies/AccusationWrapper.java b/fireflies/src/main/java/com/salesforce/apollo/fireflies/AccusationWrapper.java index 8299c64f0c..a6c32ade52 100644 --- a/fireflies/src/main/java/com/salesforce/apollo/fireflies/AccusationWrapper.java +++ b/fireflies/src/main/java/com/salesforce/apollo/fireflies/AccusationWrapper.java @@ -8,14 +8,13 @@ import static com.salesforce.apollo.cryptography.QualifiedBase64.signature; -import com.salesfoce.apollo.fireflies.proto.SignedAccusation; +import com.salesforce.apollo.fireflies.proto.SignedAccusation; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.cryptography.JohnHancock; /** * @author hal.hildebrand - * */ public class AccusationWrapper { diff --git a/fireflies/src/main/java/com/salesforce/apollo/fireflies/Binding.java b/fireflies/src/main/java/com/salesforce/apollo/fireflies/Binding.java index d67622bc0d..161164c5a4 100644 --- a/fireflies/src/main/java/com/salesforce/apollo/fireflies/Binding.java +++ 
b/fireflies/src/main/java/com/salesforce/apollo/fireflies/Binding.java @@ -8,20 +8,19 @@ import com.codahale.metrics.Timer; import com.google.common.collect.HashMultiset; -import com.google.common.util.concurrent.ListenableFuture; import com.google.protobuf.ByteString; -import com.salesfoce.apollo.cryptography.proto.HexBloome; -import com.salesfoce.apollo.fireflies.proto.*; import com.salesforce.apollo.archipelago.RouterImpl.CommonCommunications; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.cryptography.HexBloom; import com.salesforce.apollo.cryptography.SignatureAlgorithm; +import com.salesforce.apollo.cryptography.proto.HexBloome; import com.salesforce.apollo.fireflies.View.Node; import com.salesforce.apollo.fireflies.View.Participant; import com.salesforce.apollo.fireflies.View.Seed; import com.salesforce.apollo.fireflies.View.Service; import com.salesforce.apollo.fireflies.comm.entrance.Entrance; +import com.salesforce.apollo.fireflies.proto.*; import com.salesforce.apollo.membership.Context; import com.salesforce.apollo.membership.Member; import com.salesforce.apollo.ring.SliceIterator; @@ -34,7 +33,9 @@ import java.time.Duration; import java.util.*; -import java.util.concurrent.*; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; @@ -54,17 +55,15 @@ class Binding { private final FireflyMetrics metrics; private final Node node; private final Parameters params; - private final ScheduledExecutorService scheduler; private final List seeds; private final View view; - public Binding(View view, List seeds, Duration duration, ScheduledExecutorService scheduler, - Context context, CommonCommunications approaches, Node node, - Parameters params, FireflyMetrics metrics, DigestAlgorithm digestAlgo) { + public Binding(View view, List seeds, Duration duration, Context context, + CommonCommunications approaches, Node node, Parameters params, + FireflyMetrics metrics, DigestAlgorithm digestAlgo) { this.view = view; this.duration = duration; this.seeds = new ArrayList<>(seeds); - this.scheduler = scheduler; this.context = context; this.node = node; this.params = params; @@ -82,25 +81,27 @@ void seeding() { log.info("Seeding view: {} context: {} with seeds: {} started on: {}", view.currentView(), this.context.getId(), seeds.size(), node.getId()); - var seeding = new CompletableFuture(); + var redirect = new CompletableFuture(); var timer = metrics == null ? 
null : metrics.seedDuration().time(); - seeding.whenComplete(join(duration, scheduler, timer)); + redirect.whenComplete(join(duration, timer)); - var seedlings = new SliceIterator<>("Seedlings", node, seeds.stream() - .map(s -> seedFor(s)) - .map(nw -> view.new Participant(nw)) - .filter(p -> !node.getId().equals(p.getId())) - .collect(Collectors.toList()), approaches); + var bootstrappers = seeds.stream() + .map(this::seedFor) + .map(nw -> view.new Participant(nw)) + .filter(p -> !node.getId().equals(p.getId())) + .collect(Collectors.toList()); + var seedlings = new SliceIterator<>("Seedlings", node, bootstrappers, approaches); AtomicReference reseed = new AtomicReference<>(); + var scheduler = Executors.newScheduledThreadPool(1, Thread.ofVirtual().factory()); reseed.set(() -> { final var registration = registration(); seedlings.iterate((link, m) -> { log.debug("Requesting Seeding from: {} on: {}", link.getMember().getId(), node.getId()); return link.seed(registration); - }, (futureSailor, link, m) -> complete(seeding, futureSailor, m), () -> { - if (!seeding.isDone()) { - scheduler.schedule(Utils.wrapped(() -> reseed.get().run(), log), params.retryDelay().toNanos(), - TimeUnit.NANOSECONDS); + }, (futureSailor, link, m) -> complete(redirect, futureSailor, m), () -> { + if (!redirect.isDone()) { + scheduler.schedule(() -> Thread.ofVirtual().start(Utils.wrapped(reseed.get(), log)), + params.retryDelay().toNanos(), TimeUnit.NANOSECONDS); } }, scheduler, params.retryDelay()); }); @@ -111,46 +112,28 @@ private void bootstrap() { log.info("Bootstrapping seed node view: {} context: {} on: {}", view.currentView(), this.context.getId(), node.getId()); var nw = node.getNote(); - final var sched = scheduler; - final var dur = duration; - view.bootstrap(nw, sched, dur); + view.bootstrap(nw, duration); } - private boolean complete(CompletableFuture redirect, Optional> futureSailor, - Member m) { + private boolean complete(CompletableFuture redirect, Optional futureSailor, Member m) { if (futureSailor.isEmpty()) { return true; } - try { - final var r = futureSailor.get().get(); - if (redirect.complete(r)) { - log.info("Redirect to view: {} context: {} from: {} on: {}", Digest.from(r.getView()), - this.context.getId(), m.getId(), node.getId()); - } + if (redirect.isDone()) { + return false; + } + final var r = futureSailor.get(); + if (redirect.complete(r)) { + log.info("Redirected to view: {} context: {} from: {} on: {}", Digest.from(r.getView()), + this.context.getId(), m.getId(), node.getId()); return false; - } catch (ExecutionException ex) { - if (ex.getCause() instanceof StatusRuntimeException sre) { - switch (sre.getStatus().getCode()) { - case RESOURCE_EXHAUSTED: - log.trace("SRE in redirect: {} on: {}", sre.getStatus(), node.getId()); - break; - default: - log.trace("SRE in redirect: {} on: {}", sre.getStatus(), node.getId()); - } - } else { - log.error("Error in redirect: {} on: {}", ex.getCause(), node.getId()); - } - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } catch (CancellationException e) { - // noop } return true; } private boolean completeGateway(Participant member, CompletableFuture gateway, - Optional> futureSailor, HashMultiset diadems, + Optional futureSailor, HashMultiset trusts, Set initialSeedSet, Digest v, int majority) { if (futureSailor.isEmpty()) { return true; @@ -159,45 +142,7 @@ private boolean completeGateway(Participant member, CompletableFuture gat return false; } - Gateway g; - try { - g = futureSailor.get().get(); - } catch 
(ExecutionException e) { - if (e.getCause() instanceof StatusRuntimeException sre) { - switch (sre.getStatus().getCode()) { - case RESOURCE_EXHAUSTED: - log.trace("Resource exhausted in join: {} with: {} : {} on: {}", v, member.getId(), sre.getStatus(), - node.getId()); - break; - case OUT_OF_RANGE: - log.debug("View change in join: {} with: {} : {} on: {}", v, member.getId(), sre.getStatus(), - node.getId()); - view.resetBootstrapView(); - node.reset(); - Thread.ofVirtual().factory().newThread(Utils.wrapped(() -> seeding(), log)).start(); - return false; - case DEADLINE_EXCEEDED: - log.trace("Join timeout for view: {} with: {} : {} on: {}", v, member.getId(), sre.getStatus(), - node.getId()); - break; - case UNAUTHENTICATED: - log.trace("Join unauthenticated for view: {} with: {} : {} on: {}", v, member.getId(), - sre.getStatus(), node.getId()); - break; - default: - log.warn("Failure in join: {} with: {} : {} on: {}", v, member.getId(), sre.getStatus(), - node.getId()); - } - } else { - log.error("Failure in join: {} with: {} on: {}", v, member.getId(), node.getId(), e.getCause()); - } - return true; - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - return false; - } catch (CancellationException e) { - return true; - } + Gateway g = futureSailor.get(); if (g.equals(Gateway.getDefaultInstance())) { return true; @@ -207,41 +152,64 @@ private boolean completeGateway(Participant member, CompletableFuture gat return true; } - if (g.getDiadem().equals(HexBloome.getDefaultInstance())) { - log.trace("Empty view in join returned from: {} on: {}", member.getId(), node.getId()); + if (g.getTrust().equals(BootstrapTrust.getDefaultInstance()) || g.getTrust() + .getDiadem() + .equals(HexBloome.getDefaultInstance())) { + log.trace("Empty bootstrap trust in join returned from: {} on: {}", member.getId(), node.getId()); return true; } - diadems.add(g.getDiadem()); + trusts.add(g.getTrust()); initialSeedSet.addAll(g.getInitialSeedSetList()); log.trace("Initial seed set count: {} view: {} from: {} on: {}", g.getInitialSeedSetCount(), v, member.getId(), node.getId()); - var vs = diadems.entrySet() - .stream() - .filter(e -> e.getCount() >= majority) - .map(e -> e.getElement()) - .findFirst() - .orElse(null); - if (vs != null) { - if (validate(v, g, gateway, diadems, initialSeedSet, majority)) { - return false; - } + var trust = trusts.entrySet() + .stream() + .filter(e -> e.getCount() >= majority) + .map(e -> e.getElement()) + .findFirst() + .orElse(null); + if (trust != null) { + validate(trust, gateway, initialSeedSet); + } else { + log.debug("Gateway received, trust count: {} majority: {} from: {} view: {} context: {} on: {}", + trusts.size(), majority, member.getId(), v, this.context.getId(), node.getId()); } - log.debug("Gateway received, view count: {} majority: {} from: {} view: {} context: {} on: {}", diadems.size(), - majority, member.getId(), v, this.context.getId(), node.getId()); return true; } - private Runnable exec(Runnable action) { - return () -> Thread.ofVirtual().factory().newThread(Utils.wrapped(action, log)).start(); + private void gatewaySRE(Digest v, Entrance link, StatusRuntimeException sre, AtomicInteger abandon) { + switch (sre.getStatus().getCode()) { + case OUT_OF_RANGE -> { + log.info("Gateway view: {} invalid: {} from: {} on: {}", v, sre.getMessage(), link.getMember().getId(), + node.getId()); + abandon.incrementAndGet(); + } + case FAILED_PRECONDITION -> { + log.info("Gateway view: {} unavailable: {} from: {} on: {}", v, sre.getMessage(), 
link.getMember().getId(), + node.getId()); + abandon.incrementAndGet(); + } + case PERMISSION_DENIED -> { + log.info("Gateway view: {} permission denied: {} from: {} on: {}", v, sre.getMessage(), + link.getMember().getId(), node.getId()); + abandon.incrementAndGet(); + } + case RESOURCE_EXHAUSTED -> { + log.info("Gateway view: {} full: {} from: {} on: {}", v, sre.getMessage(), link.getMember().getId(), + node.getId()); + abandon.incrementAndGet(); + } + default -> log.info("Join view: {} error: {} from: {} on: {}", v, sre.getMessage(), link.getMember().getId(), + node.getId()); + } } private Join join(Digest v) { return Join.newBuilder().setView(v.toDigeste()).setNote(node.getNote().getWrapped()).build(); } - private BiConsumer join(Duration duration, ScheduledExecutorService scheduler, - Timer.Context timer) { + private BiConsumer join(Duration duration, Timer.Context timer) { return (r, t) -> { if (t != null) { log.error("Failed seeding on: {}", node.getId(), t); @@ -257,31 +225,31 @@ private Join join(Digest v) { this.context.rebalance(r.getCardinality()); node.nextNote(view); - log.debug("Completing redirect to view: {} context: {} successors: {} on: {}", view, this.context.getId(), - r.getSuccessorsCount(), node.getId()); + log.debug("Completing redirect to view: {} context: {} sample: {} on: {}", view, this.context.getId(), + r.getSampleCount(), node.getId()); if (timer != null) { timer.close(); } - join(r, view, duration, scheduler); + join(r, view, duration); }; } - private void join(Redirect redirect, Digest v, Duration duration, ScheduledExecutorService scheduler) { - var successors = redirect.getSuccessorsList() - .stream() - .map(sn -> new NoteWrapper(sn.getNote(), digestAlgo)) - .map(nw -> view.new Participant(nw)) - .collect(Collectors.toList()); - log.info("Redirecting to: {} context: {} successors: {} on: {}", v, this.context.getId(), successors.size(), + private void join(Redirect redirect, Digest v, Duration duration) { + var sample = redirect.getSampleList() + .stream() + .map(sn -> new NoteWrapper(sn.getNote(), digestAlgo)) + .map(nw -> view.new Participant(nw)) + .collect(Collectors.toList()); + log.info("Redirecting to: {} context: {} sample: {} on: {}", v, this.context.getId(), sample.size(), node.getId()); var gateway = new CompletableFuture(); var timer = metrics == null ? null : metrics.joinDuration().time(); - gateway.whenComplete(view.join(scheduler, duration, timer)); + gateway.whenComplete(view.join(duration, timer)); var regate = new AtomicReference(); var retries = new AtomicInteger(); - HashMultiset diadems = HashMultiset.create(); + HashMultiset trusts = HashMultiset.create(); HashSet initialSeedSet = new HashSet<>(); final var cardinality = redirect.getCardinality(); @@ -291,25 +259,53 @@ private void join(Redirect redirect, Digest v, Duration duration, ScheduledExecu this.context.rebalance(cardinality); node.nextNote(v); - final var redirecting = new SliceIterator<>("Gateways", node, successors, approaches); + final var redirecting = new SliceIterator<>("Gateways", node, sample, approaches); var majority = redirect.getBootstrap() ? 
1 : Context.minimalQuorum(redirect.getRings(), this.context.getBias()); final var join = join(v); + final var abandon = new AtomicInteger(); + var scheduler = Executors.newScheduledThreadPool(1, Thread.ofVirtual().factory()); regate.set(() -> { redirecting.iterate((link, m) -> { log.debug("Joining: {} contacting: {} on: {}", v, link.getMember().getId(), node.getId()); - return link.join(join, params.seedingTimeout()); - }, (futureSailor, link, m) -> completeGateway((Participant) m, gateway, futureSailor, diadems, + try { + var g = link.join(join, params.seedingTimeout()); + if (g == null || g.equals(Gateway.getDefaultInstance())) { + log.info("Gateway view: {} empty from: {} on: {}", v, link.getMember().getId(), node.getId()); + abandon.incrementAndGet(); + return null; + } + return g; + } catch (StatusRuntimeException sre) { + gatewaySRE(v, link, sre, abandon); + return null; + } catch (Throwable t) { + log.info("Gateway view: {} error: {} from: {} on: {}", v, t.toString(), link.getMember().getId(), + node.getId()); + abandon.incrementAndGet(); + return null; + } + }, (futureSailor, link, m) -> completeGateway((Participant) m, gateway, futureSailor, trusts, initialSeedSet, v, majority), () -> { - if (retries.get() < params.joinRetries()) { - log.debug("Failed to join view: {} retry: {} out of: {} on: {}", v, retries.incrementAndGet(), - params.joinRetries(), node.getId()); - diadems.clear(); - initialSeedSet.clear(); - scheduler.schedule(exec(() -> regate.get().run()), - Entropy.nextBitsStreamLong(params.retryDelay().toNanos()), TimeUnit.NANOSECONDS); + if (gateway.isDone()) { + return; + } + if (abandon.get() >= majority) { + log.info("Abandoning Gateway view: {} reseeding on: {}", v, node.getId()); + seeding(); } else { - log.error("Failed to join view: {} cannot obtain majority on: {}", view, node.getId()); - view.stop(); + abandon.set(0); + if (retries.get() < params.joinRetries()) { + log.info("Failed to join view: {} retry: {} out of: {} on: {}", v, retries.incrementAndGet(), + params.joinRetries(), node.getId()); + trusts.clear(); + initialSeedSet.clear(); + scheduler.schedule(() -> Thread.ofVirtual().start(Utils.wrapped(regate.get(), log)), + Entropy.nextBitsStreamLong(params.retryDelay().toNanos()), + TimeUnit.NANOSECONDS); + } else { + log.error("Failed to join view: {} cannot obtain majority Gateway on: {}", view, node.getId()); + view.stop(); + } } }, scheduler, params.retryDelay()); }); @@ -328,7 +324,8 @@ private NoteWrapper seedFor(Seed seed) { .setNote(Note.newBuilder() .setHost(seed.endpoint().getHostName()) .setPort(seed.endpoint().getPort()) - .setCoordinates(seed.coordinates().toEventCoords()) + .setCoordinates( + seed.establishment().getCoordinates().toEventCoords()) .setEpoch(-1) .setMask(ByteString.copyFrom( Node.createInitialMask(context).toByteArray()))) @@ -338,28 +335,15 @@ private NoteWrapper seedFor(Seed seed) { return new NoteWrapper(seedNote, digestAlgo); } - private boolean validate(Digest v, Gateway g, CompletableFuture gateway, HashMultiset hexes, - Set successors, int majority) { - final var max = hexes.entrySet() - .stream() - .filter(e -> e.getCount() >= majority) - .map(e -> e.getElement()) - .findFirst(); - var hex = max.orElse(null); - if (hex != null) { - final var hexBloom = new HexBloom(hex); - if (gateway.complete( - new Bound(hexBloom, successors.stream().map(sn -> new NoteWrapper(sn, digestAlgo)).toList()))) { - log.info("Gateway acquired: {} context: {} on: {}", hexBloom.compact(), this.context.getId(), - node.getId()); - } - return true; + 
private void validate(BootstrapTrust trust, CompletableFuture gateway, Set initialSeedSet) { + final var hexBloom = new HexBloom(trust.getDiadem()); + if (gateway.complete( + new Bound(hexBloom, trust.getSuccessorsList().stream().map(sn -> new NoteWrapper(sn, digestAlgo)).toList(), + initialSeedSet.stream().map(sn -> new NoteWrapper(sn, digestAlgo)).toList()))) { + log.info("Gateway acquired: {} context: {} on: {}", hexBloom.compact(), this.context.getId(), node.getId()); } - log.info("Gateway: {} majority not achieved: {} context: {} on: {}", v, majority, this.context.getId(), - node.getId()); - return false; } - record Bound(HexBloom view, List successors) { + record Bound(HexBloom view, List successors, List initialSeedSet) { } } diff --git a/fireflies/src/main/java/com/salesforce/apollo/fireflies/Bootstrapper.java b/fireflies/src/main/java/com/salesforce/apollo/fireflies/Bootstrapper.java new file mode 100644 index 0000000000..579cf8a687 --- /dev/null +++ b/fireflies/src/main/java/com/salesforce/apollo/fireflies/Bootstrapper.java @@ -0,0 +1,244 @@ +package com.salesforce.apollo.fireflies; + +import com.github.benmanes.caffeine.cache.CacheLoader; +import com.github.benmanes.caffeine.cache.Caffeine; +import com.github.benmanes.caffeine.cache.LoadingCache; +import com.github.benmanes.caffeine.cache.RemovalCause; +import com.google.common.collect.HashMultiset; +import com.salesforce.apollo.archipelago.RouterImpl; +import com.salesforce.apollo.cryptography.Verifier; +import com.salesforce.apollo.fireflies.comm.entrance.Entrance; +import com.salesforce.apollo.fireflies.proto.Validation; +import com.salesforce.apollo.membership.Member; +import com.salesforce.apollo.membership.SigningMember; +import com.salesforce.apollo.ring.SliceIterator; +import com.salesforce.apollo.stereotomy.*; +import com.salesforce.apollo.stereotomy.event.EstablishmentEvent; +import com.salesforce.apollo.stereotomy.event.proto.IdentAndSeq; +import com.salesforce.apollo.stereotomy.event.proto.KeyState_; +import com.salesforce.apollo.stereotomy.event.protobuf.KeyStateImpl; +import com.salesforce.apollo.stereotomy.identifier.Identifier; +import org.checkerframework.checker.nullness.qual.Nullable; +import org.joou.ULong; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Executors; + +/** + * Verifiers that delegate to a majority of the sample for event validation and verification + *
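+ * A minimal usage sketch (illustrative only; the timeout, frequency and sample values are
+ * assumptions, not defaults): build the bootstrap verifiers over the successor sample obtained
+ * from the joined group, then use the delegated event validation and verifiers during the join.
+ *
+ *   var bootstrapper = new Bootstrapper(member, Duration.ofSeconds(5), successors, majority,
+ *                                       Duration.ofMillis(100), communications);
+ *   EventValidation validation = bootstrapper.getValidator();
+ *   Verifiers verifiers = bootstrapper;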

+ * + * @author hal.hildebrand + **/ +public class Bootstrapper implements Verifiers { + private final static Logger log = LoggerFactory.getLogger(Bootstrapper.class); + private final List successors; + private final SigningMember member; + private final int majority; + private final LoadingCache ksSeq; + private final RouterImpl.CommonCommunications communications; + private final Duration operationTimeout; + private final Duration operationsFrequency; + + public Bootstrapper(S member, Duration operationTimeout, + List successors, int majority, + Duration operationsFrequency, + RouterImpl.CommonCommunications communications) { + this.member = member; + this.successors = new ArrayList<>(successors); + this.majority = majority; + this.communications = communications; + this.operationTimeout = operationTimeout; + this.operationsFrequency = operationsFrequency; + ksSeq = Caffeine.newBuilder() + .maximumSize(100) + .expireAfterWrite(Duration.ofMinutes(1)) + .removalListener((IdentifierSequence seq, KeyState ks, RemovalCause cause) -> log.trace( + "KeyState {} was removed ({})", seq, cause)) + .build(new CacheLoader() { + + @Override + public @Nullable KeyState load(IdentifierSequence key) throws Exception { + return delegate(key); + } + }); + } + + public EventValidation getValidator() { + return new EventValidation() { + + @Override + public KeyState keyState(Identifier identifier, ULong seqNum) { + log.trace("Get key state: {}:{} on: {}", identifier, seqNum, member.getId()); + return ksSeq.get(new IdentifierSequence(identifier, seqNum)); + } + + @Override + public boolean validate(EstablishmentEvent event) { + log.trace("Validate event: {} on: {}", event, member.getId()); + return validate(event.getCoordinates()); + } + + @Override + public boolean validate(EventCoordinates coordinates) { + log.trace("Validating coordinates: {} on: {}", coordinates, member.getId()); + return Bootstrapper.this.validate(coordinates); + } + }; + } + + @Override + public Optional verifierFor(Identifier identifier) { + return Optional.of(new BootstrapVerifier(identifier)); + } + + @Override + public Optional verifierFor(EventCoordinates coordinates) { + return Optional.of(new BootstrapVerifier(coordinates.getIdentifier())); + } + + protected KeyState getKeyState(Identifier identifier, ULong sequenceNumber) { + return ksSeq.get(new IdentifierSequence(identifier, sequenceNumber)); + } + + private boolean complete(CompletableFuture ksFuture, Optional futureSailor, + HashMultiset keystates, Member m) { + if (futureSailor.isEmpty()) { + return true; + } + if (ksFuture.isDone()) { + return true; + } + final var ks = futureSailor.get(); + keystates.add(ks); + + var vs = keystates.entrySet() + .stream() + .filter(e -> e.getCount() >= majority) + .map(e -> e.getElement()) + .findFirst() + .orElse(null); + if (vs != null) { + var keyState = new KeyStateImpl(vs); + if (ksFuture.complete(keyState)) { + log.debug("Key state: {} received majority on: {}", keyState.getCoordinates(), member.getId()); + return false; + } + } + return true; + } + + private boolean completeValidation(CompletableFuture valid, Optional futureSailor, + HashMultiset validations, Member m) { + if (futureSailor.isEmpty()) { + return true; + } + if (valid.isDone()) { + return true; + } + final var v = futureSailor.get(); + validations.add(v); + + var validation = validations.entrySet() + .stream() + .filter(e -> e.getCount() >= majority) + .map(e -> e.getElement()) + .findFirst(); + if (!validation.isEmpty()) { + if (valid.complete(validation.get())) { + 
log.debug("Validation: {} received majority on: {}", validation.get().getResult(), member.getId()); + return false; + } + } + return true; + } + + private KeyState delegate(IdentifierSequence idSeq) { + log.info("Get key state: {} from slice on: {}", idSeq, member.getId()); + var iterator = new SliceIterator<>("Retrieve KeyState", member, successors, communications); + final var identifierSeq = idSeq.toIdSeq(); + var ks = new CompletableFuture(); + HashMultiset keystates = HashMultiset.create(); + iterator.iterate((link, m) -> { + log.debug("Requesting Key State from: {} on: {}", link.getMember().getId(), member.getId()); + return link.keyState(identifierSeq); + }, (futureSailor, link, m) -> complete(ks, futureSailor, keystates, m), () -> { + if (!ks.isDone()) { + log.warn("Failed to retrieve key state: {} from slice on: {}", idSeq, member.getId()); + ks.complete(null); + } + }, Executors.newScheduledThreadPool(1, Thread.ofVirtual().factory()), operationsFrequency); + try { + return ks.get(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } catch (ExecutionException e) { + log.warn("Unable to retrieve key state: {} on: {}", idSeq, member.getId()); + } + return null; + } + + private boolean validate(EventCoordinates coordinates) { + log.info("Validate event: {} from slice on: {}", coordinates, member.getId()); + var succ = successors.stream().filter(m -> coordinates.getIdentifier().equals(m.getId())).findFirst(); + if (succ.isPresent()) { + return true; + } + var iterator = new SliceIterator<>("Retrieve KeyState", member, successors, communications); + var valid = new CompletableFuture(); + HashMultiset validations = HashMultiset.create(); + iterator.iterate((link, m) -> { + log.debug("Requesting Validation: {} from: {} on: {}", coordinates, link.getMember().getId(), + member.getId()); + return link.validate(coordinates.toEventCoords()); + }, (futureSailor, link, m) -> completeValidation(valid, futureSailor, validations, m), () -> { + if (!valid.isDone()) { + log.warn("Failed to validate: {} from slice on: {}", coordinates, member.getId()); + valid.complete(Validation.newBuilder().setResult(false).build()); + } + }, Executors.newScheduledThreadPool(1, Thread.ofVirtual().factory()), operationsFrequency); + try { + return valid.get().getResult(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } catch (ExecutionException e) { + log.warn("Unable to validate: {} on: {}", coordinates, member.getId()); + } + return false; + } + + private record IdentifierSequence(Identifier identifier, ULong seqNum) { + public IdentAndSeq toIdSeq() { + return IdentAndSeq.newBuilder() + .setIdentifier(identifier.toIdent()) + .setSequenceNumber(seqNum.longValue()) + .build(); + } + + @Override + public String toString() { + return "{" + "identifier=" + identifier + ", seqNum=" + seqNum + '}'; + } + } + + private class BootstrapVerifier extends KeyStateVerifier { + + public BootstrapVerifier(Identifier identifier) { + super(identifier); + } + + @Override + protected KeyState getKeyState(ULong sequenceNumber) { + var key = new IdentifierSequence(identifier, sequenceNumber); + log.info("Get key state: {} on: {}", key, member.getId()); + return ksSeq.get(key); + } + } +} diff --git a/fireflies/src/main/java/com/salesforce/apollo/fireflies/NoteWrapper.java b/fireflies/src/main/java/com/salesforce/apollo/fireflies/NoteWrapper.java index c91c84e141..4482c0d693 100644 --- a/fireflies/src/main/java/com/salesforce/apollo/fireflies/NoteWrapper.java +++ 
b/fireflies/src/main/java/com/salesforce/apollo/fireflies/NoteWrapper.java @@ -6,23 +6,22 @@ */ package com.salesforce.apollo.fireflies; -import static com.salesforce.apollo.cryptography.QualifiedBase64.signature; - -import java.util.BitSet; - -import com.salesfoce.apollo.fireflies.proto.Note; -import com.salesfoce.apollo.fireflies.proto.Note.Builder; -import com.salesfoce.apollo.fireflies.proto.SignedNote; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.cryptography.JohnHancock; +import com.salesforce.apollo.fireflies.proto.Note; +import com.salesforce.apollo.fireflies.proto.Note.Builder; +import com.salesforce.apollo.fireflies.proto.SignedNote; import com.salesforce.apollo.stereotomy.EventCoordinates; import com.salesforce.apollo.stereotomy.identifier.Identifier; import com.salesforce.apollo.stereotomy.identifier.SelfAddressingIdentifier; +import java.util.BitSet; + +import static com.salesforce.apollo.cryptography.QualifiedBase64.signature; + /** * @author hal.hildebrand - * */ public class NoteWrapper { private final Digest currentView; @@ -59,9 +58,11 @@ public String getHost() { } public Digest getId() { - return ((SelfAddressingIdentifier) Identifier.from(note.getNote() - .getCoordinates() - .getIdentifier())).getDigest(); + return getIdentifier().getDigest(); + } + + public SelfAddressingIdentifier getIdentifier() { + return (SelfAddressingIdentifier) Identifier.from(note.getNote().getCoordinates().getIdentifier()); } public BitSet getMask() { @@ -83,4 +84,5 @@ public SignedNote getWrapped() { public Builder newBuilder() { return Note.newBuilder(note.getNote()); } + } diff --git a/fireflies/src/main/java/com/salesforce/apollo/fireflies/Parameters.java b/fireflies/src/main/java/com/salesforce/apollo/fireflies/Parameters.java index cbcde66d68..fe96226962 100644 --- a/fireflies/src/main/java/com/salesforce/apollo/fireflies/Parameters.java +++ b/fireflies/src/main/java/com/salesforce/apollo/fireflies/Parameters.java @@ -10,7 +10,6 @@ /** * @author hal.hildebrand - * */ public record Parameters(int joinRetries, int minimumBiffCardinality, int rebuttalTimeout, int viewChangeRounds, int finalizeViewRounds, double fpr, int maximumTxfr, Duration retryDelay, int maxPending, @@ -40,10 +39,9 @@ public static class Builder { /** * Maximum number of elements to transfer per type per update */ - private int maximumTxfr = 10; + private int maximumTxfr = 1024; /** * Maximum pending joins - * */ private int maxPending = 15; /** @@ -81,68 +79,49 @@ public int getCrowns() { return crowns; } + public Builder setCrowns(int crowns) { + this.crowns = crowns; + return this; + } + public int getFinalizeViewRounds() { return finalizeViewRounds; } + public Builder setFinalizeViewRounds(int finalizeViewRounds) { + this.finalizeViewRounds = finalizeViewRounds; + return this; + } + public double getFpr() { return fpr; } + public Builder setFpr(double fpr) { + this.fpr = fpr; + return this; + } + public int getJoinRetries() { return joinRetries; } - public int getMaximumTxfr() { - return maximumTxfr; + public Builder setJoinRetries(int joinRetries) { + this.joinRetries = joinRetries; + return this; } public int getMaxPending() { return maxPending; } - public int getMinimumBiffCardinality() { - return minimumBiffCardinality; - } - - public int getRebuttalTimeout() { - return rebuttalTimeout; - } - - public Duration getRetryDelay() { - return retryDelay; - } - - public Duration getSeedingTimout() { - return seedingTimout; - } 
- - public int getValidationRetries() { - return validationRetries; - } - - public int getViewChangeRounds() { - return viewChangeRounds; - } - - public Builder setCrowns(int crowns) { - this.crowns = crowns; - return this; - } - - public Builder setFinalizeViewRounds(int finalizeViewRounds) { - this.finalizeViewRounds = finalizeViewRounds; - return this; - } - - public Builder setFpr(double fpr) { - this.fpr = fpr; + public Builder setMaxPending(int maxPending) { + this.maxPending = maxPending; return this; } - public Builder setJoinRetries(int joinRetries) { - this.joinRetries = joinRetries; - return this; + public int getMaximumTxfr() { + return maximumTxfr; } public Builder setMaximumTxfr(int maximumTxfr) { @@ -150,9 +129,8 @@ public Builder setMaximumTxfr(int maximumTxfr) { return this; } - public Builder setMaxPending(int maxPending) { - this.maxPending = maxPending; - return this; + public int getMinimumBiffCardinality() { + return minimumBiffCardinality; } public Builder setMinimumBiffCardinality(int minimumBiffCardinality) { @@ -160,26 +138,46 @@ public Builder setMinimumBiffCardinality(int minimumBiffCardinality) { return this; } + public int getRebuttalTimeout() { + return rebuttalTimeout; + } + public Builder setRebuttalTimeout(int rebuttalTimeout) { this.rebuttalTimeout = rebuttalTimeout; return this; } + public Duration getRetryDelay() { + return retryDelay; + } + public Builder setRetryDelay(Duration retryDelay) { this.retryDelay = retryDelay; return this; } + public Duration getSeedingTimout() { + return seedingTimout; + } + public Builder setSeedingTimout(Duration seedingTimout) { this.seedingTimout = seedingTimout; return this; } + public int getValidationRetries() { + return validationRetries; + } + public Builder setValidationRetries(int validationRetries) { this.validationRetries = validationRetries; return this; } + public int getViewChangeRounds() { + return viewChangeRounds; + } + public Builder setViewChangeRounds(int viewChangeRounds) { this.viewChangeRounds = viewChangeRounds; return this; diff --git a/fireflies/src/main/java/com/salesforce/apollo/fireflies/View.java b/fireflies/src/main/java/com/salesforce/apollo/fireflies/View.java index 388c939046..97dc711ee6 100644 --- a/fireflies/src/main/java/com/salesforce/apollo/fireflies/View.java +++ b/fireflies/src/main/java/com/salesforce/apollo/fireflies/View.java @@ -12,14 +12,12 @@ import com.google.common.collect.Multiset.Entry; import com.google.common.collect.Ordering; import com.google.protobuf.ByteString; -import com.salesfoce.apollo.fireflies.proto.*; -import com.salesfoce.apollo.stereotomy.event.proto.KERL_; -import com.salesfoce.apollo.stereotomy.event.proto.KeyState_; -import com.salesfoce.apollo.cryptography.proto.Biff; import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.archipelago.Router.ServiceRouting; import com.salesforce.apollo.archipelago.RouterImpl.CommonCommunications; +import com.salesforce.apollo.bloomFilters.BloomFilter; import com.salesforce.apollo.cryptography.*; +import com.salesforce.apollo.cryptography.proto.Biff; import com.salesforce.apollo.fireflies.Binding.Bound; import com.salesforce.apollo.fireflies.ViewManagement.Ballot; import com.salesforce.apollo.fireflies.comm.entrance.Entrance; @@ -29,20 +27,27 @@ import com.salesforce.apollo.fireflies.comm.gossip.FFService; import com.salesforce.apollo.fireflies.comm.gossip.FfServer; import com.salesforce.apollo.fireflies.comm.gossip.Fireflies; +import com.salesforce.apollo.fireflies.proto.*; import 
com.salesforce.apollo.membership.*; import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; import com.salesforce.apollo.ring.RingCommunications; -import com.salesforce.apollo.stereotomy.ControlledIdentifier; import com.salesforce.apollo.stereotomy.EventCoordinates; import com.salesforce.apollo.stereotomy.EventValidation; +import com.salesforce.apollo.stereotomy.Verifiers; +import com.salesforce.apollo.stereotomy.event.KeyEvent; +import com.salesforce.apollo.stereotomy.event.proto.EventCoords; +import com.salesforce.apollo.stereotomy.event.proto.IdentAndSeq; +import com.salesforce.apollo.stereotomy.event.proto.KeyEvent_; +import com.salesforce.apollo.stereotomy.event.proto.KeyState_; +import com.salesforce.apollo.stereotomy.identifier.Identifier; import com.salesforce.apollo.stereotomy.identifier.SelfAddressingIdentifier; +import com.salesforce.apollo.utils.BbBackedInputStream; import com.salesforce.apollo.utils.Entropy; -import com.salesforce.apollo.membership.RoundScheduler; import com.salesforce.apollo.utils.Utils; -import com.salesforce.apollo.bloomFilters.BloomFilter; import io.grpc.Status; import io.grpc.StatusRuntimeException; import io.grpc.stub.StreamObserver; +import org.joou.ULong; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -84,39 +89,40 @@ public class View { private static final Logger log = LoggerFactory.getLogger( View.class); private static final String SCHEDULED_VIEW_CHANGE = "Scheduled View Change"; + final CommonCommunications comm; private final CommonCommunications approaches; - private final CommonCommunications comm; - private final Context context; - private final DigestAlgorithm digestAlgo; - private final RingCommunications gossiper; + private final Context context; + private final DigestAlgorithm digestAlgo; + private final RingCommunications gossiper; private final AtomicBoolean introduced = new AtomicBoolean(); private final Map lifecycleListeners = new HashMap<>(); private final FireflyMetrics metrics; - private final Node node; - private final Map observations = new ConcurrentSkipListMap<>(); - private final Parameters params; + private final Node node; + private final Map observations = new ConcurrentSkipListMap<>(); + private final Parameters params; private final ConcurrentMap pendingRebuttals = new ConcurrentSkipListMap<>(); private final RoundScheduler roundTimers; private final Set shunned = new ConcurrentSkipListSet<>(); private final AtomicBoolean started = new AtomicBoolean(); private final Map timers = new HashMap<>(); - private final EventValidation validation; private final ReadWriteLock viewChange = new ReentrantReadWriteLock( true); private final ViewManagement viewManagement; + private final EventValidation validation; + private final Verifiers verifiers; private volatile ScheduledFuture futureGossip; public View(Context context, ControlledIdentifierMember member, InetSocketAddress endpoint, - EventValidation validation, Router communications, Parameters params, DigestAlgorithm digestAlgo, - FireflyMetrics metrics) { - this(context, member, endpoint, validation, communications, params, communications, digestAlgo, metrics); + EventValidation validation, Verifiers verifiers, Router communications, Parameters params, + DigestAlgorithm digestAlgo, FireflyMetrics metrics) { + this(context, member, endpoint, validation, verifiers, communications, params, communications, digestAlgo, + metrics); } public View(Context context, ControlledIdentifierMember member, InetSocketAddress endpoint, - EventValidation 
validation, Router communications, Parameters params, Router gateway, - DigestAlgorithm digestAlgo, FireflyMetrics metrics) { + EventValidation validation, Verifiers verifiers, Router communications, Parameters params, + Router gateway, DigestAlgorithm digestAlgo, FireflyMetrics metrics) { this.metrics = metrics; - this.validation = validation; this.params = params; this.digestAlgo = digestAlgo; this.context = context; @@ -130,15 +136,17 @@ public View(Context context, ControlledIdentifierMember member, Ine this.approaches = gateway.create(node, context.getId(), service, service.getClass().getCanonicalName() + ":approach", r -> new EntranceServer(gateway.getClientIdentityProvider(), r, metrics), - EntranceClient.getCreate(metrics), Entrance.getLocalLoopback(node)); + EntranceClient.getCreate(metrics), Entrance.getLocalLoopback(node, service)); gossiper = new RingCommunications<>(context, node, comm); + this.validation = validation; + this.verifiers = verifiers; } /** * Check the validity of a mask. A mask is valid if the following conditions are satisfied: * *
-     * - The mask is of length 2t+1
+     * - The mask is of length bias*t+1
      * - the mask has exactly t + 1 enabled elements.
      * 
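      * For illustration only (assumed names: maskLength for the declared mask length, bias for the
      * context bias, t for the tolerance), the two conditions above reduce to:
      *
      *   boolean valid = maskLength == bias * t + 1 && mask.cardinality() == t + 1;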
* @@ -174,6 +182,13 @@ public Context getContext() { return context; } + /** + * @return the Digest ID of the Node of this View + */ + public Digest getNodeId() { + return node.getId(); + } + /** * Register a listener to receive view change events * @@ -189,8 +204,7 @@ public UUID register(ViewLifecycleListener listener) { /** * Start the View */ - public void start(CompletableFuture onJoin, Duration d, List seedpods, - ScheduledExecutorService scheduler) { + public void start(CompletableFuture onJoin, Duration d, List seedpods) { Objects.requireNonNull(onJoin, "Join completion must not be null"); if (!started.compareAndSet(false, true)) { return; @@ -206,10 +220,12 @@ public void start(CompletableFuture onJoin, Duration d, List seedpod context.clear(); node.reset(); + var scheduler = Executors.newScheduledThreadPool(1, Thread.ofVirtual().factory()); var initial = Entropy.nextBitsStreamLong(d.toNanos()); - scheduler.schedule(Utils.wrapped( - () -> new Binding(this, seeds, d, scheduler, context, approaches, node, params, metrics, digestAlgo).seeding(), - log), initial, TimeUnit.NANOSECONDS); + scheduler.schedule(() -> Thread.ofVirtual() + .start(Utils.wrapped( + () -> new Binding(this, seeds, d, context, approaches, node, params, metrics, + digestAlgo).seeding(), log)), initial, TimeUnit.NANOSECONDS); log.info("{} started on: {}", context.getId(), node.getId()); } @@ -217,12 +233,12 @@ public void start(CompletableFuture onJoin, Duration d, List seedpod /** * Start the View */ - public void start(Runnable onJoin, Duration d, List seedpods, ScheduledExecutorService scheduler) { + public void start(Runnable onJoin, Duration d, List seedpods) { final var futureSailor = new CompletableFuture(); futureSailor.whenComplete((v, t) -> { onJoin.run(); }); - start(futureSailor, d, seedpods, scheduler); + start(futureSailor, d, seedpods); } /** @@ -261,8 +277,7 @@ boolean addToView(NoteWrapper note) { Participant m = context.getMember(note.getId()); if (m == null) { newMember = true; - if (!validation.verify(note.getCoordinates(), note.getSignature(), - note.getWrapped().getNote().toByteString())) { + if (!verify(note.getIdentifier(), note.getSignature(), note.getWrapped().getNote().toByteString())) { log.trace("invalid participant note from: {} on: {}", note.getId(), node.getId()); if (metrics != null) { metrics.filteredNotes().mark(); @@ -277,22 +292,12 @@ boolean addToView(NoteWrapper note) { long nextEpoch = note.getEpoch(); long currentEpoch = current.getEpoch(); if (nextEpoch <= currentEpoch) { - // log.trace("Note: {} epoch out of date: {} current: {} on: {}", note.getId(), nextEpoch, - // currentEpoch, node.getId()); if (metrics != null) { metrics.filteredNotes().mark(); } return false; } } - - if (!m.verify(note.getSignature(), note.getWrapped().getNote().toByteString())) { - log.trace("Note signature invalid: {} on: {}", note.getId(), node.getId()); - if (metrics != null) { - metrics.filteredNotes().mark(); - } - return false; - } } if (metrics != null) { @@ -300,7 +305,14 @@ boolean addToView(NoteWrapper note) { } var member = m; - stable(() -> { + return stable(() -> { + if (!member.verify(note.getSignature(), note.getWrapped().getNote().toByteString())) { + log.trace("Note signature invalid: {} on: {}", note.getId(), node.getId()); + if (metrics != null) { + metrics.filteredNotes().mark(); + } + return false; + } var accused = member.isAccused(); stopRebuttalTimer(member); member.setNote(note); @@ -308,37 +320,19 @@ boolean addToView(NoteWrapper note) { if (accused) { 
checkInvalidations(member); } - if (!viewManagement.isJoined() && context.totalCount() == context.cardinality()) { + if (!viewManagement.joined() && context.totalCount() == context.cardinality()) { assert context.totalCount() == context.cardinality(); viewManagement.join(); } else { assert context.totalCount() <= context.cardinality() : "total: " + context.totalCount() + " card: " + context.cardinality(); } + return true; }); - if (!newMember) { - if (current != null) { - if ( - current.getCoordinates().getSequenceNumber().compareTo(member.note.getCoordinates().getSequenceNumber()) - > 0) { - Thread.ofVirtual().factory().newThread(Utils.wrapped(() -> { - final var coordinates = member.note.getCoordinates(); - try { - lifecycleListeners.values().forEach(l -> { - l.update(coordinates); - }); - } catch (Throwable t) { - log.error("Error during coordinate update: {}", coordinates, t); - } - }, log)).start(); - } - } - } - return true; } - void bootstrap(NoteWrapper nw, ScheduledExecutorService sched, Duration dur) { - viewManagement.bootstrap(nw, sched, dur); + void bootstrap(NoteWrapper nw, Duration dur) { + viewManagement.bootstrap(nw, dur); } Digest bootstrapView() { @@ -357,7 +351,7 @@ void finalizeViewChange() { final var cardinality = context.memberCount(); final var superMajority = cardinality - ((cardinality - 1) / 4); if (observations.size() < superMajority) { - log.trace("Do not have supermajority: {} required: {} for: {} on: {}", observations.size(), + log.trace("Do not have super majority: {} required: {} for: {} on: {}", observations.size(), superMajority, currentView(), node.getId()); scheduleFinalizeViewChange(2); return; @@ -416,9 +410,8 @@ void introduced() { introduced.set(true); } - BiConsumer join(ScheduledExecutorService scheduler, Duration duration, - com.codahale.metrics.Timer.Context timer) { - return viewManagement.join(scheduler, duration, timer); + BiConsumer join(Duration duration, com.codahale.metrics.Timer.Context timer) { + return viewManagement.join(duration, timer); } void notifyListeners(List joining, List leaving) { @@ -434,6 +427,37 @@ void notifyListeners(List joining, List leaving) { }); } + /** + * Process the updates of the supplied juicy gossip. 
+ * + * @param gossip + */ + void processUpdates(Gossip gossip) { + processUpdates(gossip.getNotes().getUpdatesList(), gossip.getAccusations().getUpdatesList(), + gossip.getObservations().getUpdatesList(), gossip.getJoins().getUpdatesList()); + } + + /** + * Redirect the receiver to the correct ring, processing any new accusations + * + * @param member + * @param gossip + * @param ring + */ + boolean redirect(Participant member, Gossip gossip, int ring) { + if (!gossip.hasRedirect()) { + log.warn("Redirect from: {} on ring: {} did not contain redirect member note on: {}", member.getId(), ring, + node.getId()); + return false; + } + final var redirect = new NoteWrapper(gossip.getRedirect(), digestAlgo); + add(redirect); + processUpdates(gossip); + log.debug("Redirected from: {} to: {} on ring: {} on: {}", member.getId(), redirect.getId(), ring, + node.getId()); + return true; + } + /** * Remove the participant from the context * @@ -470,9 +494,11 @@ void resetBootstrapView() { viewManagement.resetBootstrapView(); } - void schedule(final Duration duration, final ScheduledExecutorService scheduler) { - futureGossip = scheduler.schedule(Utils.wrapped(() -> gossip(duration, scheduler), log), - Entropy.nextBitsStreamLong(duration.toNanos()), TimeUnit.NANOSECONDS); + void schedule(final Duration duration) { + var scheduler = Executors.newScheduledThreadPool(1, Thread.ofVirtual().factory()); + futureGossip = scheduler.schedule( + () -> Thread.ofVirtual().start(Utils.wrapped(() -> gossip(duration, scheduler), log)), + Entropy.nextBitsStreamLong(duration.toNanos()), TimeUnit.NANOSECONDS); } void scheduleFinalizeViewChange() { @@ -540,16 +566,90 @@ Stream streamShunned() { return shunned.stream(); } + void tick() { + roundTimers.tick(); + } + void viewChange(Runnable r) { + // log.error("Enter view change on: {}", node.getId()); final var lock = viewChange.writeLock(); lock.lock(); try { r.run(); + // log.error("Exit view change on: {}", node.getId()); } finally { lock.unlock(); } } + /** + * Gossip with the member + * + * @param ring - the index of the gossip ring the gossip is originating from in this view + * @param link - the outbound communications to the paired member + * @param ring + * @throws Exception + */ + protected Gossip gossip(Fireflies link, int ring) { + tick(); + if (shunned.contains(link.getMember().getId())) { + log.trace("Shunning gossip view: {} with: {} on: {}", currentView(), link.getMember().getId(), + node.getId()); + if (metrics != null) { + metrics.shunnedGossip().mark(); + } + return null; + } + + final SayWhat gossip = stable(() -> SayWhat.newBuilder() + .setView(currentView().toDigeste()) + .setNote(node.getNote().getWrapped()) + .setRing(ring) + .setGossip(commonDigests()) + .build()); + try { + return link.gossip(gossip); + } catch (Throwable e) { + final var p = (Participant) link.getMember(); + if (!viewManagement.joined()) { + log.debug("Exception: {} bootstrap gossiping with:S {} view: {} on: {}", e.getMessage(), p.getId(), + currentView(), node.getId()); + return null; + } + if (e instanceof StatusRuntimeException sre) { + switch (sre.getStatus().getCode()) { + case PERMISSION_DENIED: + log.trace("Rejected gossip: {} view: {} from: {} on: {}", sre.getStatus(), currentView(), p.getId(), + node.getId()); + break; + case FAILED_PRECONDITION: + log.trace("Failed gossip: {} view: {} from: {} on: {}", sre.getStatus(), currentView(), p.getId(), + node.getId()); + break; + case RESOURCE_EXHAUSTED: + log.trace("Unavailable for gossip: {} view: {} from: {} on: {}", 
sre.getStatus(), currentView(), + p.getId(), node.getId()); + break; + case CANCELLED: + log.trace("Communication cancelled for gossip view: {} from: {} on: {}", currentView(), p.getId(), + node.getId()); + break; + default: + log.debug("Error gossiping: {} view: {} from: {} on: {}", sre.getStatus(), p.getId(), currentView(), + node.getId()); + accuse(p, ring, sre); + break; + + } + } else { + log.debug("Exception gossiping with: {} view: {} on: {}", p.getId(), currentView(), node.getId(), e); + accuse(p, ring, e); + } + return null; + } + + } + /** * Accuse the member on the ring * @@ -598,12 +698,6 @@ private boolean add(AccusationWrapper accusation) { return false; } - if (!accuser.verify(accusation.getSignature(), accusation.getWrapped().getAccusation().toByteString())) { - log.trace("Accusation discarded, accusation by: {} accused:{} signature invalid on: {}", accuser.getId(), - accused.getId(), node.getId()); - return false; - } - return add(accusation, accuser, accused); } @@ -629,6 +723,12 @@ private boolean add(AccusationWrapper accusation, Participant accuser, Participa Participant currentAccuser = context.getMember(accused.getAccusation(ring.getIndex()).getAccuser()); if (!currentAccuser.equals(accuser)) { if (ring.isBetween(currentAccuser, accuser, accused)) { + if (!accused.verify(accusation.getSignature(), + accusation.getWrapped().getAccusation().toByteString())) { + log.trace("Accusation discarded, accusation by: {} accused:{} signature invalid on: {}", + accuser.getId(), accused.getId(), node.getId()); + return false; + } accused.addAccusation(accusation); pendingRebuttals.computeIfAbsent(accused.getId(), d -> roundTimers.schedule(() -> gc(accused), params.rebuttalTimeout())); @@ -731,11 +831,13 @@ private boolean add(SignedViewChange observation) { node.getId()); return false; } - final var signature = JohnHancock.from(observation.getSignature()); - if (!member.verify(signature, observation.getChange().toByteString())) { - return false; - } - return observations.put(observer.prefix(observation.getChange().getAttempt()), observation) == null; + return observations.computeIfAbsent(observer.prefix(observation.getChange().getAttempt()), p -> { + final var signature = JohnHancock.from(observation.getSignature()); + if (!member.verify(signature, observation.getChange().toByteString())) { + return null; + } + return observation; + }) != null; } private boolean addJoin(SignedNote sn) { @@ -759,8 +861,7 @@ private boolean addJoin(SignedNote sn) { return false; } - if (!validation.verify(note.getCoordinates(), note.getSignature(), - note.getWrapped().getNote().toByteString())) { + if (!validation.validate(note.getCoordinates())) { log.trace("Invalid join note from {} on: {}", note.getId(), node.getId()); return false; } @@ -795,7 +896,7 @@ private boolean addToCurrentView(NoteWrapper note) { /** * If we monitor the target and haven't issued an alert, do so * - * @param sa + * @param target */ private void amplify(Participant target) { context.rings() @@ -846,6 +947,15 @@ private Digests commonDigests() { .build(); } + private Verifier.Filtered filtered(SelfAddressingIdentifier id, SigningThreshold threshold, JohnHancock signature, + InputStream message) { + var verifier = verifiers.verifierFor(id); + if (verifier.isEmpty()) { + return new Verifier.Filtered(false, 0, null); + } + return verifier.get().filtered(threshold, signature, message); + } + /** * Garbage collect the member. 
Member is now shunned and cannot recover * @@ -876,6 +986,10 @@ private BloomFilter getAccusationsBff(long seed, double p) { return bff; } + private KeyEvent getEvent(EventCoordinates coordinates) { + return null; + } + /** * @param seed * @param p @@ -912,129 +1026,68 @@ private void gossip(Duration duration, ScheduledExecutorService scheduler) { } if (context.activeCount() == 1) { - roundTimers.tick(); + tick(); } gossiper.execute((link, ring) -> gossip(link, ring), (result, destination) -> gossip(result, destination, duration, scheduler)); } - /** - * Gossip with the member - * - * @param ring - the index of the gossip ring the gossip is originating from in this view - * @param link - the outbound communications to the paired member - * @param ring - * @throws Exception - */ - private Gossip gossip(Fireflies link, int ring) { - roundTimers.tick(); - if (shunned.contains(link.getMember().getId())) { - log.trace("Shunning gossip view: {} with: {} on: {}", currentView(), link.getMember().getId(), - node.getId()); - if (metrics != null) { - metrics.shunnedGossip().mark(); - } - return null; - } - - final SayWhat gossip = stable(() -> SayWhat.newBuilder() - .setView(currentView().toDigeste()) - .setNote(node.getNote().getWrapped()) - .setRing(ring) - .setGossip(commonDigests()) - .build()); - try { - return link.gossip(gossip); - } catch (Throwable e) { - final var p = (Participant) link.getMember(); - if (!viewManagement.joined()) { - log.debug("Exception: {} bootstrap gossiping with:S {} view: {} on: {}", e.getMessage(), p.getId(), - currentView(), node.getId()); - return null; - } - if (e instanceof StatusRuntimeException sre) { - switch (sre.getStatus().getCode()) { - case PERMISSION_DENIED: - log.trace("Rejected gossip: {} view: {} from: {} on: {}", sre.getStatus(), currentView(), p.getId(), - node.getId()); - break; - case RESOURCE_EXHAUSTED: - log.trace("Unavailable for gossip: {} view: {} from: {} on: {}", sre.getStatus(), currentView(), - p.getId(), node.getId()); - break; - default: - log.debug("Error gossiping: {} view: {} from: {} on: {}", sre.getStatus(), p.getId(), currentView(), - node.getId()); - accuse(p, ring, sre); - break; - - } - return null; - } else { - log.debug("Exception gossiping with {} view: {} on: {}", p.getId(), currentView(), node.getId(), e); - accuse(p, ring, e); - return null; - } - } - - } - /** * Handle the gossip response from the destination * - * @param futureSailor + * @param result * @param destination * @param duration * @param scheduler */ private void gossip(Optional result, RingCommunications.Destination destination, Duration duration, ScheduledExecutorService scheduler) { - final var member = destination.member(); try { - if (result.isEmpty()) { - return; - } - - try { - Gossip gossip = result.get(); - if (gossip.hasRedirect()) { - stable(() -> redirect(member, gossip, destination.ring())); - } else if (viewManagement.joined()) { - try { - Update update = stable(() -> response(gossip)); - if (update != null && !update.equals(Update.getDefaultInstance())) { - log.trace("Update for: {} notes: {} accusations: {} joins: {} observations: {} on: {}", - destination.link().getMember().getId(), update.getNotesCount(), - update.getAccusationsCount(), update.getJoinsCount(), - update.getObservationsCount(), node.getId()); - destination.link() - .update(State.newBuilder() - .setView(currentView().toDigeste()) - .setRing(destination.ring()) - .setUpdate(update) - .build()); + if (result.isPresent()) { + final var member = destination.member(); + try { + 
Gossip gossip = result.get(); + if (gossip.hasRedirect()) { + stable(() -> redirect(member, gossip, destination.ring())); + } else if (viewManagement.joined()) { + try { + Update update = stable(() -> response(gossip)); + if (update != null && !update.equals(Update.getDefaultInstance())) { + log.trace("Update for: {} notes: {} accusations: {} joins: {} observations: {} on: {}", + destination.member().getId(), update.getNotesCount(), + update.getAccusationsCount(), update.getJoinsCount(), + update.getObservationsCount(), node.getId()); + destination.link() + .update(State.newBuilder() + .setView(currentView().toDigeste()) + .setRing(destination.ring()) + .setUpdate(update) + .build()); + } + } catch (StatusRuntimeException e) { + handleSRE("update", destination, member, e); + } + } else { + stable(() -> processUpdates(gossip)); + } + } catch (NoSuchElementException e) { + if (!viewManagement.joined()) { + log.debug("Null bootstrap gossiping with: {} view: {} on: {}", member.getId(), currentView(), + node.getId()); + } else { + if (e.getCause() instanceof StatusRuntimeException sre) { + handleSRE("gossip", destination, member, sre); + } else { + accuse(member, destination.ring(), e); } - } catch (StatusRuntimeException e) { - handleSRE("update", destination, member, e); } - } else { - stable(() -> processUpdates(gossip)); - } - } catch (NoSuchElementException e) { - if (!viewManagement.joined()) { - log.debug("Null bootstrap gossiping with: {} view: {} on: {}", member.getId(), currentView(), - node.getId()); - return; - } - if (e.getCause() instanceof StatusRuntimeException sre) { - handleSRE("gossip", destination, member, sre); - } else { - accuse(member, destination.ring(), e); } } + } finally { - futureGossip = scheduler.schedule(Utils.wrapped(() -> gossip(duration, scheduler), log), duration.toNanos(), - TimeUnit.NANOSECONDS); + futureGossip = scheduler.schedule( + () -> Thread.ofVirtual().start(Utils.wrapped(() -> gossip(duration, scheduler), log)), duration.toNanos(), + TimeUnit.NANOSECONDS); } } @@ -1042,13 +1095,16 @@ private void handleSRE(String type, RingCommunications.Destination bff) { * members * * @param p - * @param digests + * @param bff * @return */ private AccusationGossip processAccusations(BloomFilter bff, double p) { @@ -1145,7 +1201,7 @@ private NoteGossip.Builder processNotes(BloomFilter bff) { * * @param from * @param p - * @param digests + * @param bff */ private NoteGossip processNotes(Digest from, BloomFilter bff, double p) { NoteGossip.Builder builder = processNotes(bff); @@ -1178,8 +1234,7 @@ private ViewChangeGossip.Builder processObservations(BloomFilter bff) { * the inbound digests that the view has more recent information * * @param p - * @param from - * @param digests + * @param bff */ private ViewChangeGossip processObservations(BloomFilter bff, double p) { ViewChangeGossip.Builder builder = processObservations(bff); @@ -1190,16 +1245,6 @@ private ViewChangeGossip processObservations(BloomFilter bff, double p) return builder.build(); } - /** - * Process the updates of the supplied juicy gossip. - * - * @param gossip - */ - private void processUpdates(Gossip gossip) { - processUpdates(gossip.getNotes().getUpdatesList(), gossip.getAccusations().getUpdatesList(), - gossip.getObservations().getUpdatesList(), gossip.getJoins().getUpdatesList()); - } - /** * Process the updates of the supplied juicy gossip. 
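// The gossip round is self-perpetuating: schedule(Duration) fires the first round after a
// randomized delay derived from the gossip duration (Entropy.nextBitsStreamLong) so members
// do not start in lockstep, and the finally block above schedules the next round a full
// duration later, forking each round onto a fresh virtual thread. A minimal sketch of that
// pattern, assuming ThreadLocalRandom as a stand-in for the Entropy jitter and a plain
// Runnable for the round itself (all names here are illustrative):
import java.time.Duration;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;

final class GossipLoop {
    private final ScheduledExecutorService scheduler =
        Executors.newScheduledThreadPool(1, Thread.ofVirtual().factory());

    void start(Duration interval, Runnable round) {
        // Jittered first round, then fixed-interval rescheduling from run().
        long jitter = ThreadLocalRandom.current().nextLong(interval.toNanos());
        scheduler.schedule(() -> Thread.ofVirtual().start(() -> run(interval, round)), jitter,
                           TimeUnit.NANOSECONDS);
    }

    private void run(Duration interval, Runnable round) {
        try {
            round.run();
        } finally {
            // Always reschedule, even when the round throws.
            scheduler.schedule(() -> Thread.ofVirtual().start(() -> run(interval, round)),
                               interval.toNanos(), TimeUnit.NANOSECONDS);
        }
    }
}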
* @@ -1238,31 +1283,7 @@ private void recover(Participant member) { if (context.activate(member)) { log.debug("Recovering: {} cardinality: {} count: {} on: {}", member.getId(), context.cardinality(), context.totalCount(), node.getId()); - } else { - // log.trace("Already active: {} cardinality: {} count: {} on: {}", member.getId(), context.cardinality(), - // context.totalCount(), node.getId()); - } - } - - /** - * Redirect the receiver to the correct ring, processing any new accusations - * - * @param member - * @param gossip - * @param ring - */ - private boolean redirect(Participant member, Gossip gossip, int ring) { - if (!gossip.hasRedirect()) { - log.warn("Redirect from: {} on ring: {} did not contain redirect member note on: {}", member.getId(), ring, - node.getId()); - return false; } - final var redirect = new NoteWrapper(gossip.getRedirect(), digestAlgo); - add(redirect); - processUpdates(gossip); - log.debug("Redirected from: {} to: {} on ring: {} on: {}", member.getId(), redirect.getId(), ring, - node.getId()); - return true; } /** @@ -1278,24 +1299,29 @@ private Gossip redirectTo(Participant member, int ring, Participant successor, D assert member != null; assert successor != null; if (successor.getNote() == null) { - log.debug("Cannot redirect from: {} to: {} on ring: {} as note is null on: {}", node, successor, ring, - node.getId()); + log.debug("Cannot redirect: {} to: {} on ring: {} as note is null on: {}", member.getId(), + successor.getId(), ring, node.getId()); return Gossip.getDefaultInstance(); } var identity = successor.getNote(); if (identity == null) { - log.debug("Cannot redirect from: {} to: {} on ring: {} as note is null on: {}", node, successor, ring, - node.getId()); + log.debug("Cannot redirect: {} to: {} on ring: {} as note is null on: {}", member.getId(), + successor.getId(), ring, node.getId()); return Gossip.getDefaultInstance(); } - return Gossip.newBuilder() - .setRedirect(successor.getNote().getWrapped()) - .setNotes(processNotes(BloomFilter.from(digests.getNoteBff()))) - .setAccusations(processAccusations(BloomFilter.from(digests.getAccusationBff()))) - .setObservations(processObservations(BloomFilter.from(digests.getObservationBff()))) - .setJoins(viewManagement.processJoins(BloomFilter.from(digests.getJoinBiff()))) - .build(); + var gossip = Gossip.newBuilder() + .setRedirect(successor.getNote().getWrapped()) + .setNotes(processNotes(BloomFilter.from(digests.getNoteBff()))) + .setAccusations(processAccusations(BloomFilter.from(digests.getAccusationBff()))) + .setObservations(processObservations(BloomFilter.from(digests.getObservationBff()))) + .setJoins(viewManagement.processJoins(BloomFilter.from(digests.getJoinBiff()))) + .build(); + log.trace("Redirecting: {} to: {} on ring: {} notes: {} acc: {} obv: {} joins: {} on: {}", member.getId(), + successor.getId(), ring, gossip.getNotes().getUpdatesCount(), + gossip.getAccusations().getUpdatesCount(), gossip.getObservations().getUpdatesCount(), + gossip.getJoins().getUpdatesCount(), node.getId()); + return gossip; } /** @@ -1404,7 +1430,7 @@ private void validate(Digest from, SayWhat request) { } private void validate(Digest from, State request) { - var valid = true; + var valid = false; try { validate(from, request.getRing(), Digest.from(request.getView())); valid = true; @@ -1415,31 +1441,46 @@ private void validate(Digest from, State request) { } } + private boolean verify(SelfAddressingIdentifier identifier, JohnHancock signature, ByteString byteString) { + return verify(identifier, signature, 
BbBackedInputStream.aggregate(byteString)); + } + + private boolean verify(SelfAddressingIdentifier id, JohnHancock signature, InputStream message) { + var verifier = verifiers.verifierFor(id); + if (verifier.isEmpty()) { + return false; + } + return verifier.get().verify(signature, message); + } + + private boolean verify(SelfAddressingIdentifier id, SigningThreshold threshold, JohnHancock signature, + InputStream message) { + var verifier = verifiers.verifierFor(id); + if (verifier.isEmpty()) { + return false; + } + return verifier.get().verify(threshold, signature, message); + } + + @FunctionalInterface public interface ViewLifecycleListener { - /** - * Notification of update to members' event coordinates - * - * @param update - the event coordinates to update - */ - void update(EventCoordinates updated); /** * Notification of a view change event * * @param context - the context for which the view change has occurred * @param viewId - the Digest identity of the new view - * @param joins - the list of joining member's event coordinates + * @param joins - the list of joining member's establishment event * @param leaves - the list of leaving member's ids */ void viewChange(Context context, Digest viewId, List joins, List leaves); } - public record Seed(EventCoordinates coordinates, InetSocketAddress endpoint) { + public record Seed(KeyEvent establishment, InetSocketAddress endpoint) { } public class Node extends Participant implements SigningMember { - private final ControlledIdentifierMember wrapped; public Node(ControlledIdentifierMember wrapped, InetSocketAddress endpoint) { @@ -1488,12 +1529,16 @@ public SignatureAlgorithm algorithm() { return wrapped.algorithm(); } - public ControlledIdentifier getIdentifier() { - return wrapped.getIdentifier(); + public SelfAddressingIdentifier getIdentifier() { + return wrapped.getIdentifier().getIdentifier(); } - public KERL_ kerl() { - return wrapped.kerl(); + @Override + public Seed_ getSeed() { + return Seed_.newBuilder() + .setNote(note.getWrapped()) + .setEstablishment(wrapped.getEvent().toKeyEvent_()) + .build(); } public JohnHancock sign(byte[] message) { @@ -1597,7 +1642,7 @@ void nextNote(Digest view) { void nextNote(long newEpoch, Digest view) { final var current = note; var n = current.newBuilder() - .setCoordinates(wrapped.getEvent().getCoordinates().toEventCoords()) + .setCoordinates(note.getCoordinates().toEventCoords()) .setEpoch(newEpoch) .setMask(ByteString.copyFrom(nextMask().toByteArray())) .setCurrentView(view.toDigeste()) @@ -1676,7 +1721,7 @@ public boolean equals(Object obj) { @Override public Filtered filtered(SigningThreshold threshold, JohnHancock signature, InputStream message) { final var current = note; - return validation.filtered(current.getCoordinates(), threshold, signature, message); + return View.this.filtered(getIdentifier(), threshold, signature, message); } public int getAccusationCount() { @@ -1698,11 +1743,16 @@ public Digest getId() { return id; } + public SelfAddressingIdentifier getIdentifier() { + return note.getIdentifier(); + } + public Seed_ getSeed() { - final var keyState = validation.getKeyState(note.getCoordinates()); + final var establishment = getEvent(getNote().getCoordinates()); return Seed_.newBuilder() .setNote(note.getWrapped()) - .setKeyState(keyState.isEmpty() ? KeyState_.getDefaultInstance() : keyState.get().toKeyState_()) + .setEstablishment( + establishment == null ? 
KeyEvent_.getDefaultInstance() : establishment.toKeyEvent_()) .build(); } @@ -1730,13 +1780,13 @@ public boolean verify(JohnHancock signature, InputStream message) { if (current == null) { return true; } - return validation.verify(current.getCoordinates(), signature, message); + return View.this.verify(getIdentifier(), signature, message); } @Override public boolean verify(SigningThreshold threshold, JohnHancock signature, InputStream message) { final var current = note; - return validation.verify(current.getCoordinates(), threshold, signature, message); + return View.this.verify(getIdentifier(), threshold, signature, message); } /** @@ -1755,15 +1805,15 @@ void addAccusation(AccusationWrapper accusation) { return; } if (n.getEpoch() != accusation.getEpoch()) { - log.trace("Invalid epoch discarding accusation from {} on {} ring {} on: {}", accusation.getAccuser(), - getId(), ringNumber, node.getId()); + log.trace("Invalid epoch discarding accusation from: {} context: {} ring {} on: {}", + accusation.getAccuser(), getId(), ringNumber, node.getId()); return; } if (n.getMask().get(ringNumber)) { validAccusations[ringNumber] = accusation; if (log.isDebugEnabled()) { - log.debug("Member {} is accusing {} ring: {} on: {}", accusation.getAccuser(), getId(), ringNumber, - node.getId()); + log.debug("Member: {} is accusing: {} context: {} ring: {} on: {}", accusation.getAccuser(), + accusation.getAccused(), getId(), ringNumber, node.getId()); } } } @@ -1774,7 +1824,8 @@ void addAccusation(AccusationWrapper accusation) { void clearAccusations() { for (var acc : validAccusations) { if (acc != null) { - log.trace("Clearing accusations for: {} on: {}", getId(), node.getId()); + log.trace("Clearing accusations for: {} context: {} on: {}", acc.getAccused(), getId(), + node.getId()); break; } } @@ -1804,7 +1855,7 @@ NoteWrapper getNote() { void invalidateAccusationOnRing(int index) { validAccusations[index] = null; - log.trace("Invalidating accusations of: {} ring: {} on: {}", getId(), index, node.getId()); + log.trace("Invalidating accusations context: {} ring: {} on: {}", getId(), index, node.getId()); } boolean isAccused() { @@ -1852,12 +1903,26 @@ public void join(Join join, Digest from, StreamObserver responseObserve viewManagement.join(join, from, responseObserver, timer); } + @Override + public KeyState_ keyState(IdentAndSeq request, Digest from) { + var identifier = Identifier.from(request.getIdentifier()); + var seq = ULong.valueOf(request.getSequenceNumber()); + + if (!viewManagement.joined()) { + log.info("Not yet joined!, ignoring key state request: {}:{} from: {} on: {}", identifier, seq, from, + node.getId()); + return KeyState_.getDefaultInstance(); + } + + var keyState = validation.keyState(identifier, seq); + return keyState == null ? KeyState_.getDefaultInstance() : keyState.toKeyState_(); + } + /** * The first message in the anti-entropy protocol. Process any digests from the inbound gossip digest. Respond * with the Gossip that represents the digests newer or not known in this view, as well as updates from this * node based on out of date information in the supplied digests. * - * @param ring - the index of the gossip ring the inbound member is gossiping on * @param request - the Gossip from our partner * @return Teh response for Moar gossip - updates this node has which the sender is out of touch with, and * digests from the sender that this node would like updated. 
@@ -1866,29 +1931,36 @@ public void join(Join join, Digest from, StreamObserver responseObserve public Gossip rumors(SayWhat request, Digest from) { if (!introduced.get()) { log.trace("Not introduced!, ring: {} from: {} on: {}", request.getRing(), from, node.getId()); - return Gossip.getDefaultInstance(); + throw new StatusRuntimeException(Status.FAILED_PRECONDITION.withDescription( + "Not introduced!, ring: %s from: %s on: %s".formatted(request.getRing(), from, node.getId()))); } return stable(() -> { validate(from, request); final var ring = request.getRing(); if (!context.validRing(ring)) { log.debug("invalid ring: {} from: {} on: {}", ring, from, node.getId()); - return Gossip.getDefaultInstance(); + throw new StatusRuntimeException(Status.FAILED_PRECONDITION.withDescription( + "invalid ring: %s from: %s on: %s".formatted(ring, from, node.getId()))); } + Participant member = context.getActiveMember(from); if (member == null) { add(new NoteWrapper(request.getNote(), digestAlgo)); member = context.getActiveMember(from); if (member == null) { - return Gossip.getDefaultInstance(); + log.debug("Not active member: {} on: {}", from, node.getId()); + throw new StatusRuntimeException(Status.PERMISSION_DENIED.withDescription( + "Not active member: %s on: %s".formatted(from, node.getId()))); } } + Participant successor = context.ring(ring).successor(member, m -> context.isActive(m.getId())); if (successor == null) { log.debug("No active successor on ring: {} from: {} on: {}", ring, from, node.getId()); - throw new StatusRuntimeException( - Status.FAILED_PRECONDITION.withDescription("No successor of: " + from)); + throw new StatusRuntimeException(Status.FAILED_PRECONDITION.withDescription( + "No active successor on ring: %s from: %s on: %s".formatted(ring, from, node.getId()))); } + Gossip g; final var digests = request.getGossip(); if (!successor.equals(node)) { @@ -1926,7 +1998,7 @@ public Redirect seed(Registration registration, Digest from) { /** * The third and final message in the anti-entropy protocol. Process the inbound update from another member. 
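// rumors() above now rejects out-of-protocol requests by throwing a StatusRuntimeException
// rather than silently returning a default Gossip, which is what lets the gossiping caller's
// switch on the status code (PERMISSION_DENIED, FAILED_PRECONDITION, ...) react per cause.
// A minimal sketch of raising such a status from a server handler (io.grpc API; the method
// and message text are illustrative, not the patch's code):
import io.grpc.Status;
import io.grpc.StatusRuntimeException;

final class Preconditions {
    static void requireIntroduced(boolean introduced, int ring, String from, String node) {
        if (!introduced) {
            throw new StatusRuntimeException(Status.FAILED_PRECONDITION.withDescription(
                "Not introduced!, ring: %s from: %s on: %s".formatted(ring, from, node)));
        }
    }
}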
* - * @param state - update state + * @param request - update state * @param from */ @Override @@ -1961,5 +2033,19 @@ public void update(State request, Digest from) { } }); } + + @Override + public Validation validateCoords(EventCoords request, Digest from) { + var coordinates = EventCoordinates.from(request); + if (!viewManagement.joined()) { + log.info("Not yet joined!, ignoring validation request: {} from: {} on: {}", from, coordinates, + node.getId()); + return Validation.newBuilder().setResult(false).build(); + } + log.info("Validating event: {} for: {} on: {}", request, from, node.getId()); + var validate = validation.validate(coordinates); + log.info("Returning validate: {}:{} to: {} on: {}", coordinates, validate, from, node.getId()); + return Validation.newBuilder().setResult(validate).build(); + } } } diff --git a/fireflies/src/main/java/com/salesforce/apollo/fireflies/ViewManagement.java b/fireflies/src/main/java/com/salesforce/apollo/fireflies/ViewManagement.java index 930657c597..09bb5878f0 100644 --- a/fireflies/src/main/java/com/salesforce/apollo/fireflies/ViewManagement.java +++ b/fireflies/src/main/java/com/salesforce/apollo/fireflies/ViewManagement.java @@ -8,8 +8,6 @@ import com.codahale.metrics.Timer; import com.google.common.base.Objects; -import com.salesfoce.apollo.fireflies.proto.*; -import com.salesfoce.apollo.fireflies.proto.Update.Builder; import com.salesforce.apollo.bloomFilters.BloomFilter; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; @@ -17,10 +15,15 @@ import com.salesforce.apollo.fireflies.Binding.Bound; import com.salesforce.apollo.fireflies.View.Node; import com.salesforce.apollo.fireflies.View.Participant; +import com.salesforce.apollo.fireflies.comm.gossip.Fireflies; +import com.salesforce.apollo.fireflies.proto.*; +import com.salesforce.apollo.fireflies.proto.Update.Builder; import com.salesforce.apollo.membership.Context; import com.salesforce.apollo.membership.ReservoirSampler; +import com.salesforce.apollo.ring.SliceIterator; import com.salesforce.apollo.stereotomy.EventCoordinates; import com.salesforce.apollo.utils.Entropy; +import com.salesforce.apollo.utils.Utils; import io.grpc.Status; import io.grpc.StatusRuntimeException; import io.grpc.stub.StreamObserver; @@ -29,12 +32,11 @@ import java.time.Duration; import java.util.*; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.ConcurrentSkipListMap; -import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.stream.Collectors; @@ -58,6 +60,7 @@ public class ViewManagement { private final Map>> pendingJoins = new ConcurrentSkipListMap<>(); private final View view; private final AtomicReference vote = new AtomicReference<>(); + private final Lock joinLock = new ReentrantLock(); private boolean bootstrap; private AtomicReference currentView = new AtomicReference<>(); private AtomicReference diadem = new AtomicReference<>(); @@ -83,7 +86,7 @@ boolean addJoin(Digest id, NoteWrapper note) { return joins.put(id, note) == null; } - void bootstrap(NoteWrapper nw, final ScheduledExecutorService sched, final Duration dur) { + void bootstrap(NoteWrapper nw, final Duration 
dur) { joins.put(nw.getId(), nw); context.activate(node); @@ -92,7 +95,7 @@ void bootstrap(NoteWrapper nw, final ScheduledExecutorService sched, final Durat new Ballot(currentView(), Collections.emptyList(), Collections.singletonList(node.getId()), digestAlgo))); view.scheduleViewChange(); - view.schedule(dur, sched); + view.schedule(dur); log.info("Bootstrapped view: {} cardinality: {} count: {} context: {} on: {}", currentView(), context.cardinality(), context.activeCount(), context.getId(), node.getId()); @@ -148,6 +151,11 @@ void install(Ballot ballot) { ballot.leaving.stream().filter(d -> !node.getId().equals(d)).forEach(p -> view.remove(p)); + final var seedSet = context.sample(params.maximumTxfr(), Entropy.bitsStream(), node.getId()) + .stream() + .map(p -> p.note.getWrapped()) + .collect(Collectors.toSet()); + context.rebalance(context.totalCount() + ballot.joining.size()); var joining = new ArrayList(); var pending = ballot.joining() @@ -169,14 +177,7 @@ void install(Ballot ballot) { HexBloom.construct(context.memberCount(), context.allMembers().map(p -> p.getId()), view.bootstrapView(), params.crowns())); view.reset(); - - var seedSet = new ArrayList(); // complete all pending joins - context.ring(Entropy.nextBitsStreamInt(context.getRingCount())) - .stream() - .limit(params.maximumTxfr()) - .map(p -> p.getNote().getWrapped()) - .forEach(sn -> seedSet.add(sn)); pending.forEach(r -> { try { r.accept(seedSet); @@ -196,48 +197,52 @@ void install(Ballot ballot) { view.notifyListeners(joining, ballot.leaving); } - boolean isJoined() { - return joined(); - } - /** * Formally join the view. Calculate the HEX-BLOOM crown and view, fail and stop if does not match currentView */ - synchronized void join() { - assert context.totalCount() == context.cardinality(); - if (joined()) { - return; - } - var current = currentView(); - var calculated = HexBloom.construct(context.totalCount(), context.allMembers().map(p -> p.getId()), - view.bootstrapView(), params.crowns()); - - if (!current.equals(calculated.compactWrapped())) { - log.error("Crown: {} does not produce view: {} cardinality: {} count: {} on: {}", - calculated.compactWrapped(), currentView(), context.cardinality(), context.totalCount(), - node.getId()); - view.stop(); - throw new IllegalStateException("Invalid crown"); - } - setDiadem(calculated); - view.notifyListeners(context.allMembers().map(p -> p.note.getCoordinates()).toList(), Collections.emptyList()); - onJoined.complete(null); + void join() { + joinLock.lock(); + try { + assert context.totalCount() == context.cardinality(); + if (joined()) { + return; + } + var current = currentView(); + log.info("Joining view: {} cardinality: {} count: {} on: {}", current, context.cardinality(), + context.totalCount(), node.getId()); + var calculated = HexBloom.construct(context.totalCount(), context.allMembers().map(p -> p.getId()), + view.bootstrapView(), params.crowns()); + + if (!current.equals(calculated.compactWrapped())) { + log.error("Crown: {} does not produce view: {} cardinality: {} count: {} on: {}", + calculated.compactWrapped(), currentView(), context.cardinality(), context.totalCount(), + node.getId()); + view.stop(); + throw new IllegalStateException("Invalid crown"); + } + setDiadem(calculated); + view.notifyListeners(context.allMembers().map(p -> p.note.getCoordinates()).toList(), + Collections.emptyList()); - view.scheduleViewChange(); + view.scheduleViewChange(); - if (metrics != null) { - metrics.viewChanges().mark(); + if (metrics != null) { + 
metrics.viewChanges().mark(); + } + log.info("Joined view: {} cardinality: {} count: {} on: {}", current, context.cardinality(), + context.totalCount(), node.getId()); + onJoined.complete(null); + } finally { + joinLock.unlock(); } - log.info("Joined view: {} cardinality: {} count: {} on: {}", current, context.cardinality(), - context.totalCount(), node.getId()); } void join(Join join, Digest from, StreamObserver responseObserver, Timer.Context timer) { final var joinView = Digest.from(join.getView()); if (!joined()) { log.trace("Not joined, ignored join of view: {} from: {} on: {}", joinView, from, node.getId()); - responseObserver.onNext(Gateway.getDefaultInstance()); - responseObserver.onCompleted(); + responseObserver.onError(new StatusRuntimeException(Status.FAILED_PRECONDITION.withDescription( + "Not joined, ignored join of view: %s from: %s on: %s".formatted(joinView, from, node.getId())))); return; } view.stable(() -> { @@ -245,7 +250,7 @@ void join(Join join, Digest from, StreamObserver responseObserver, Time var note = new NoteWrapper(join.getNote(), digestAlgo); if (!from.equals(note.getId())) { responseObserver.onError( - new StatusRuntimeException(Status.INVALID_ARGUMENT.withDescription("Member not match note"))); + new StatusRuntimeException(Status.INVALID_ARGUMENT.withDescription("Member does not match note"))); return; } log.debug("Join requested from: {} view: {} context: {} cardinality: {} on: {}", from, thisView, @@ -253,7 +258,10 @@ void join(Join join, Digest from, StreamObserver responseObserver, Time if (contains(from)) { log.debug("Already a member: {} view: {} context: {} cardinality: {} on: {}", from, thisView, context.getId(), context.cardinality(), node.getId()); - joined(Collections.emptySet(), from, responseObserver, timer); + joined(context.sample(params.maximumTxfr(), Entropy.bitsStream(), node.getId()) + .stream() + .map(p -> p.note.getWrapped()) + .toList(), from, responseObserver, timer); return; } if (!thisView.equals(joinView)) { @@ -272,7 +280,7 @@ void join(Join join, Digest from, StreamObserver responseObserver, Time new StatusRuntimeException(Status.RESOURCE_EXHAUSTED.withDescription("No room at the inn"))); return; } - pendingJoins.put(from, seeds -> { + pendingJoins.computeIfAbsent(from, d -> seeds -> { log.info("Gateway established for: {} view: {} context: {} cardinality: {} on: {}", from, currentView(), context.getId(), context.cardinality(), node.getId()); joined(seeds, from, responseObserver, timer); @@ -283,8 +291,7 @@ void join(Join join, Digest from, StreamObserver responseObserver, Time }); } - BiConsumer join(ScheduledExecutorService scheduler, Duration duration, - Timer.Context timer) { + BiConsumer join(Duration duration, Timer.Context timer) { return (bound, t) -> { view.viewChange(() -> { final var hex = bound.view(); @@ -302,33 +309,30 @@ void join(Join join, Digest from, StreamObserver responseObserver, Time currentView.set(hex.compact()); bound.successors().forEach(nw -> view.addToView(nw)); + bound.initialSeedSet().forEach(nw -> view.addToView(nw)); view.reset(); context.allMembers().forEach(p -> p.clearAccusations()); - view.introduced(); - - view.schedule(duration, scheduler); + view.schedule(duration); if (timer != null) { timer.stop(); } - view.introduced(); + view.introduced(); log.info("Currently joining view: {} seeds: {} cardinality: {} count: {} on: {}", currentView.get(), bound.successors().size(), context.cardinality(), context.totalCount(), node.getId()); if (context.totalCount() == context.cardinality()) { join(); + 
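// join() above only succeeds when the crown recomputed from the full membership reproduces
// the current view digest; otherwise the view stops with "Invalid crown". A much-simplified
// sketch of that consistency check, using a plain SHA-256 over the sorted member ids as a
// stand-in for the HEX-BLOOM construction (all names here are illustrative, not the
// project's API):
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.HexFormat;
import java.util.List;

final class MembershipDigestCheck {
    // Recompute a digest over the complete, sorted membership and compare it with the digest
    // the view advertises; a mismatch means this node's membership has diverged from the view.
    static boolean consistent(List<String> memberIds, String advertisedViewHex)
        throws NoSuchAlgorithmException {
        MessageDigest md = MessageDigest.getInstance("SHA-256");
        memberIds.stream().sorted().forEach(id -> md.update(id.getBytes(StandardCharsets.UTF_8)));
        return HexFormat.of().formatHex(md.digest()).equals(advertisedViewHex);
    }
}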
} else { + populate(new ArrayList(context.activeMembers())); } }); }; } - boolean joined() { - return onJoined.isDone(); - } - void joinUpdatesFor(BloomFilter joinBff, Builder builder) { joins.entrySet() .stream() @@ -337,6 +341,10 @@ void joinUpdatesFor(BloomFilter joinBff, Builder builder) { .forEach(e -> builder.addJoins(e.getValue().getWrapped())); } + boolean joined() { + return onJoined.isDone(); + } + /** * start a view change if there's any offline members or joining members */ @@ -348,6 +356,38 @@ void maybeViewChange() { } } + void populate(List sample) { + var populate = new SliceIterator("Populate: " + context.getId(), node, sample, view.comm); + var repopulate = new AtomicReference(); + var scheduler = Executors.newScheduledThreadPool(1, Thread.ofVirtual().factory()); + repopulate.set(() -> { + populate.iterate((link, m) -> { + log.debug("Populating: {} contacting: {} on: {}", context.getId(), link.getMember().getId(), + node.getId()); + view.tick(); + return view.gossip(link, 0); + }, (futureSailor, link, m) -> { + futureSailor.ifPresent(g -> { + if (g.hasRedirect()) { + final Participant member = (Participant) link.getMember(); + if (g.hasRedirect()) { + view.stable(() -> view.redirect(member, g, 0)); + } + } else { + view.stable(() -> view.processUpdates(g)); + } + }); + return !joined(); + }, () -> { + if (!joined()) { + scheduler.schedule(() -> Thread.ofVirtual().start(Utils.wrapped(repopulate.get(), log)), 500, + TimeUnit.MILLISECONDS); + } + }, scheduler, Duration.ofMillis(500)); + }); + repopulate.get().run(); + } + JoinGossip.Builder processJoins(BloomFilter bff) { JoinGossip.Builder builder = JoinGossip.newBuilder(); @@ -404,13 +444,13 @@ Redirect seed(Registration registration, Digest from) { } return view.stable(() -> { var newMember = view.new Participant(note.getId()); - final var successors = new TreeSet(context.successors(newMember, m -> context.isActive(m))); + final var sample = context.sample(params.maximumTxfr(), Entropy.bitsStream(), (Digest) null); - log.debug("Member seeding: {} view: {} context: {} successors: {} on: {}", newMember.getId(), currentView(), - context.getId(), successors.size(), node.getId()); + log.info("Member seeding: {} view: {} context: {} sample: {} on: {}", newMember.getId(), currentView(), + context.getId(), sample.size(), node.getId()); return Redirect.newBuilder() .setView(currentView().toDigeste()) - .addAllSuccessors(successors.stream().filter(p -> p != null).map(p -> p.getSeed()).toList()) + .addAllSample(sample.stream().filter(p -> p != null).map(p -> p.getSeed()).toList()) .setCardinality(context.cardinality()) .setBootstrap(bootstrap) .setRings(context.getRingCount()) @@ -461,10 +501,25 @@ private void initiateViewChange() { private void joined(Collection seedSet, Digest from, StreamObserver responseObserver, Timer.Context timer) { - final var builder = Gateway.newBuilder().addAllInitialSeedSet(seedSet).setDiadem(diadem.get().toHexBloome()); - context.successors(from, m -> context.isActive(m)) - .forEach(p -> builder.addInitialSeedSet(p.getNote().getWrapped())); - var gateway = builder.build(); + var unique = new HashSet(seedSet); + final var initialSeeds = new ArrayList(seedSet); + final var successors = new HashSet(); + + context.successors(from, m -> context.isActive(m)).forEach(p -> { + var sn = p.getNote().getWrapped(); + if (unique.add(sn)) { + initialSeeds.add(sn); + } + successors.add(sn); + }); + var gateway = Gateway.newBuilder() + .addAllInitialSeedSet(initialSeeds) + .setTrust(BootstrapTrust.newBuilder() + 
.addAllSuccessors(successors) + .setDiadem(diadem.get().toHexBloome())) + .build(); + log.info("Gateway initial seeding: {} successors: {} for: {} on: {}", gateway.getInitialSeedSetCount(), + successors.size(), from, node.getId()); responseObserver.onNext(gateway); responseObserver.onCompleted(); if (timer != null) { diff --git a/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/entrance/Entrance.java b/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/entrance/Entrance.java index ca2c1d2042..b2cbd3f4b9 100644 --- a/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/entrance/Entrance.java +++ b/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/entrance/Entrance.java @@ -6,25 +6,23 @@ */ package com.salesforce.apollo.fireflies.comm.entrance; -import java.io.IOException; -import java.time.Duration; - -import com.google.common.util.concurrent.ListenableFuture; -import com.salesfoce.apollo.fireflies.proto.Gateway; -import com.salesfoce.apollo.fireflies.proto.Join; -import com.salesfoce.apollo.fireflies.proto.Redirect; -import com.salesfoce.apollo.fireflies.proto.Registration; import com.salesforce.apollo.archipelago.Link; import com.salesforce.apollo.fireflies.View.Node; +import com.salesforce.apollo.fireflies.proto.*; import com.salesforce.apollo.membership.Member; +import com.salesforce.apollo.stereotomy.event.proto.EventCoords; +import com.salesforce.apollo.stereotomy.event.proto.IdentAndSeq; +import com.salesforce.apollo.stereotomy.event.proto.KeyState_; + +import java.io.IOException; +import java.time.Duration; /** * @author hal.hildebrand - * */ public interface Entrance extends Link { - static Entrance getLocalLoopback(Node node) { + static Entrance getLocalLoopback(Node node, EntranceService service) { return new Entrance() { @Override @@ -37,18 +35,32 @@ public Member getMember() { } @Override - public ListenableFuture join(Join join, Duration timeout) { + public Gateway join(Join join, Duration timeout) { return null; } @Override - public ListenableFuture seed(Registration registration) { + public KeyState_ keyState(IdentAndSeq idAndSeq) { return null; } + + @Override + public Redirect seed(Registration registration) { + return null; + } + + @Override + public Validation validate(EventCoords coords) { + return service.validateCoords(coords, getMember().getId()); + } }; } - ListenableFuture join(Join join, Duration timeout); + Gateway join(Join join, Duration timeout); + + KeyState_ keyState(IdentAndSeq idAndSeq); + + Redirect seed(Registration registration); - ListenableFuture seed(Registration registration); + Validation validate(EventCoords coords); } diff --git a/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/entrance/EntranceClient.java b/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/entrance/EntranceClient.java index 77ef1c587d..53953a4bf6 100644 --- a/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/entrance/EntranceClient.java +++ b/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/entrance/EntranceClient.java @@ -6,42 +6,38 @@ */ package com.salesforce.apollo.fireflies.comm.entrance; -import java.time.Duration; -import java.util.concurrent.TimeUnit; - -import com.google.common.util.concurrent.ListenableFuture; -import com.salesfoce.apollo.fireflies.proto.EntranceGrpc; -import com.salesfoce.apollo.fireflies.proto.EntranceGrpc.EntranceFutureStub; -import com.salesfoce.apollo.fireflies.proto.Gateway; -import com.salesfoce.apollo.fireflies.proto.Join; -import 
com.salesfoce.apollo.fireflies.proto.Redirect; -import com.salesfoce.apollo.fireflies.proto.Registration; import com.salesforce.apollo.archipelago.ManagedServerChannel; import com.salesforce.apollo.archipelago.ServerConnectionCache.CreateClientCommunications; import com.salesforce.apollo.fireflies.FireflyMetrics; +import com.salesforce.apollo.fireflies.proto.*; import com.salesforce.apollo.membership.Member; +import com.salesforce.apollo.stereotomy.event.proto.EventCoords; +import com.salesforce.apollo.stereotomy.event.proto.IdentAndSeq; +import com.salesforce.apollo.stereotomy.event.proto.KeyState_; + +import java.time.Duration; +import java.util.concurrent.TimeUnit; /** * @author hal.hildebrand - * */ public class EntranceClient implements Entrance { - public static CreateClientCommunications getCreate(FireflyMetrics metrics) { - return (c) -> new EntranceClient(c, metrics); - - } - - private final ManagedServerChannel channel; - private final EntranceFutureStub client; - private final FireflyMetrics metrics; + private final ManagedServerChannel channel; + private final EntranceGrpc.EntranceBlockingStub client; + private final FireflyMetrics metrics; public EntranceClient(ManagedServerChannel channel, FireflyMetrics metrics) { this.channel = channel; - this.client = EntranceGrpc.newFutureStub(channel).withCompression("gzip"); + this.client = EntranceGrpc.newBlockingStub(channel).withCompression("gzip"); this.metrics = metrics; } + public static CreateClientCommunications getCreate(FireflyMetrics metrics) { + return (c) -> new EntranceClient(c, metrics); + + } + @Override public void close() { channel.release(); @@ -53,48 +49,54 @@ public Member getMember() { } @Override - public ListenableFuture join(Join join, Duration timeout) { + public Gateway join(Join join, Duration timeout) { if (metrics != null) { var serializedSize = join.getSerializedSize(); metrics.outboundBandwidth().mark(serializedSize); metrics.outboundJoin().update(serializedSize); } - ListenableFuture result = client.withDeadlineAfter(timeout.toNanos(), TimeUnit.NANOSECONDS).join(join); - result.addListener(() -> { - if (metrics != null) { - try { - var serializedSize = result.get().getSerializedSize(); - metrics.inboundBandwidth().mark(serializedSize); - metrics.inboundGateway().update(serializedSize); - } catch (Throwable e) { - // nothing - } + Gateway result = client.withDeadlineAfter(timeout.toNanos(), TimeUnit.NANOSECONDS).join(join); + if (metrics != null) { + try { + var serializedSize = result.getSerializedSize(); + metrics.inboundBandwidth().mark(serializedSize); + metrics.inboundGateway().update(serializedSize); + } catch (Throwable e) { + // nothing } - }, r -> r.run()); + } return result; } @Override - public ListenableFuture seed(Registration registration) { + public KeyState_ keyState(IdentAndSeq idAndSeq) { + return client.keyState(idAndSeq); + } + + @Override + public Redirect seed(Registration registration) { if (metrics != null) { var serializedSize = registration.getSerializedSize(); metrics.outboundBandwidth().mark(serializedSize); metrics.outboundSeed().update(serializedSize); } - ListenableFuture result = client.seed(registration); - result.addListener(() -> { - if (metrics != null) { - try { - var serializedSize = result.get().getSerializedSize(); - metrics.inboundBandwidth().mark(serializedSize); - metrics.inboundRedirect().update(serializedSize); - } catch (Throwable e) { - // nothing - } + Redirect result = client.seed(registration); + if (metrics != null) { + try { + var serializedSize = 
result.getSerializedSize(); + metrics.inboundBandwidth().mark(serializedSize); + metrics.inboundRedirect().update(serializedSize); + } catch (Throwable e) { + // nothing } - }, r -> r.run()); + } return result; } + @Override + public Validation validate(EventCoords coords) { + return client.validate(coords); + } + } diff --git a/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/entrance/EntranceServer.java b/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/entrance/EntranceServer.java index 010009f971..7032592507 100644 --- a/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/entrance/EntranceServer.java +++ b/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/entrance/EntranceServer.java @@ -7,28 +7,26 @@ package com.salesforce.apollo.fireflies.comm.entrance; import com.codahale.metrics.Timer.Context; -import com.salesfoce.apollo.fireflies.proto.EntranceGrpc.EntranceImplBase; -import com.salesfoce.apollo.fireflies.proto.Gateway; -import com.salesfoce.apollo.fireflies.proto.Join; -import com.salesfoce.apollo.fireflies.proto.Redirect; -import com.salesfoce.apollo.fireflies.proto.Registration; import com.salesforce.apollo.archipelago.RoutableService; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.fireflies.FireflyMetrics; import com.salesforce.apollo.fireflies.View.Service; +import com.salesforce.apollo.fireflies.proto.EntranceGrpc.EntranceImplBase; +import com.salesforce.apollo.fireflies.proto.*; import com.salesforce.apollo.protocols.ClientIdentity; - +import com.salesforce.apollo.stereotomy.event.proto.EventCoords; +import com.salesforce.apollo.stereotomy.event.proto.IdentAndSeq; +import com.salesforce.apollo.stereotomy.event.proto.KeyState_; import io.grpc.stub.StreamObserver; /** * @author hal.hildebrand - * */ public class EntranceServer extends EntranceImplBase { - private ClientIdentity identity; private final FireflyMetrics metrics; private final RoutableService router; + private final ClientIdentity identity; public EntranceServer(ClientIdentity identity, RoutableService r, FireflyMetrics metrics) { this.metrics = metrics; @@ -50,8 +48,41 @@ public void join(Join request, StreamObserver responseObserver) { return; } router.evaluate(responseObserver, s -> { - // async handling - s.join(request, from, responseObserver, timer); + try { + s.join(request, from, responseObserver, timer); + } catch (Throwable t) { + responseObserver.onError(t); + } + }); + } + + @Override + public void keyState(IdentAndSeq request, StreamObserver responseObserver) { + if (metrics != null) { + var serializedSize = request.getSerializedSize(); + metrics.inboundBandwidth().mark(serializedSize); + metrics.inboundSeed().update(serializedSize); + } + Digest from = identity.getFrom(); + if (from == null) { + responseObserver.onError(new IllegalStateException("Member has been removed")); + return; + } + router.evaluate(responseObserver, s -> { + KeyState_ r; + try { + r = s.keyState(request, from); + } catch (Throwable t) { + responseObserver.onError(t); + return; + } + responseObserver.onNext(r); + responseObserver.onCompleted(); + if (metrics != null) { + var serializedSize = r.getSerializedSize(); + metrics.outboundBandwidth().mark(serializedSize); + metrics.outboundRedirect().update(serializedSize); + } }); } @@ -69,7 +100,13 @@ public void seed(Registration request, StreamObserver responseObserver return; } router.evaluate(responseObserver, s -> { - var r = s.seed(request, from); + Redirect r; + try { + r = s.seed(request, from); + } 
catch (Throwable t) { + responseObserver.onError(t); + return; + } responseObserver.onNext(r); responseObserver.onCompleted(); if (timer != null) { @@ -80,4 +117,24 @@ public void seed(Registration request, StreamObserver responseObserver } }); } + + @Override + public void validate(EventCoords request, StreamObserver responseObserver) { + Digest from = identity.getFrom(); + if (from == null) { + responseObserver.onError(new IllegalStateException("Member has been removed")); + return; + } + router.evaluate(responseObserver, s -> { + Validation r; + try { + r = s.validateCoords(request, from); + } catch (Throwable t) { + responseObserver.onError(t); + return; + } + responseObserver.onNext(r); + responseObserver.onCompleted(); + }); + } } diff --git a/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/entrance/EntranceService.java b/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/entrance/EntranceService.java index c2c507447b..0c2f3dff14 100644 --- a/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/entrance/EntranceService.java +++ b/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/entrance/EntranceService.java @@ -7,22 +7,23 @@ package com.salesforce.apollo.fireflies.comm.entrance; import com.codahale.metrics.Timer.Context; -import com.salesfoce.apollo.fireflies.proto.Gateway; -import com.salesfoce.apollo.fireflies.proto.Join; -import com.salesfoce.apollo.fireflies.proto.Redirect; -import com.salesfoce.apollo.fireflies.proto.Registration; import com.salesforce.apollo.cryptography.Digest; - +import com.salesforce.apollo.fireflies.proto.*; +import com.salesforce.apollo.stereotomy.event.proto.EventCoords; +import com.salesforce.apollo.stereotomy.event.proto.IdentAndSeq; +import com.salesforce.apollo.stereotomy.event.proto.KeyState_; import io.grpc.stub.StreamObserver; /** * @author hal.hildebrand - * */ public interface EntranceService { void join(Join request, Digest from, StreamObserver responseObserver, Context timer); + KeyState_ keyState(IdentAndSeq request, Digest from); + Redirect seed(Registration request, Digest from); + Validation validateCoords(EventCoords request, Digest from); } diff --git a/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/gossip/FFService.java b/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/gossip/FFService.java index 3c205fb6df..33232676e1 100644 --- a/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/gossip/FFService.java +++ b/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/gossip/FFService.java @@ -6,14 +6,13 @@ */ package com.salesforce.apollo.fireflies.comm.gossip; -import com.salesfoce.apollo.fireflies.proto.Gossip; -import com.salesfoce.apollo.fireflies.proto.SayWhat; -import com.salesfoce.apollo.fireflies.proto.State; +import com.salesforce.apollo.fireflies.proto.Gossip; +import com.salesforce.apollo.fireflies.proto.SayWhat; +import com.salesforce.apollo.fireflies.proto.State; import com.salesforce.apollo.cryptography.Digest; /** * @author hal.hildebrand - * */ public interface FFService { diff --git a/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/gossip/FfClient.java b/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/gossip/FfClient.java index 79f08cfa71..5ca8c008e8 100644 --- a/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/gossip/FfClient.java +++ b/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/gossip/FfClient.java @@ -7,10 +7,10 @@ package com.salesforce.apollo.fireflies.comm.gossip; import 
com.codahale.metrics.Timer.Context; -import com.salesfoce.apollo.fireflies.proto.FirefliesGrpc; -import com.salesfoce.apollo.fireflies.proto.Gossip; -import com.salesfoce.apollo.fireflies.proto.SayWhat; -import com.salesfoce.apollo.fireflies.proto.State; +import com.salesforce.apollo.fireflies.proto.FirefliesGrpc; +import com.salesforce.apollo.fireflies.proto.Gossip; +import com.salesforce.apollo.fireflies.proto.SayWhat; +import com.salesforce.apollo.fireflies.proto.State; import com.salesforce.apollo.archipelago.ManagedServerChannel; import com.salesforce.apollo.archipelago.ServerConnectionCache.CreateClientCommunications; import com.salesforce.apollo.fireflies.FireflyMetrics; @@ -22,9 +22,9 @@ */ public class FfClient implements Fireflies { - private final ManagedServerChannel channel; + private final ManagedServerChannel channel; private final FirefliesGrpc.FirefliesBlockingStub client; - private final FireflyMetrics metrics; + private final FireflyMetrics metrics; public FfClient(ManagedServerChannel channel, FireflyMetrics metrics) { this.channel = channel; diff --git a/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/gossip/FfServer.java b/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/gossip/FfServer.java index 94cee1098e..3944a7b6b0 100644 --- a/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/gossip/FfServer.java +++ b/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/gossip/FfServer.java @@ -8,10 +8,10 @@ import com.codahale.metrics.Timer.Context; import com.google.protobuf.Empty; -import com.salesfoce.apollo.fireflies.proto.FirefliesGrpc.FirefliesImplBase; -import com.salesfoce.apollo.fireflies.proto.Gossip; -import com.salesfoce.apollo.fireflies.proto.SayWhat; -import com.salesfoce.apollo.fireflies.proto.State; +import com.salesforce.apollo.fireflies.proto.FirefliesGrpc.FirefliesImplBase; +import com.salesforce.apollo.fireflies.proto.Gossip; +import com.salesforce.apollo.fireflies.proto.SayWhat; +import com.salesforce.apollo.fireflies.proto.State; import com.salesforce.apollo.archipelago.RoutableService; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.fireflies.FireflyMetrics; @@ -23,7 +23,6 @@ /** * @author hal.hildebrand - * */ public class FfServer extends FirefliesImplBase { diff --git a/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/gossip/Fireflies.java b/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/gossip/Fireflies.java index 444ecd9cd6..5fd6bc2fd0 100644 --- a/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/gossip/Fireflies.java +++ b/fireflies/src/main/java/com/salesforce/apollo/fireflies/comm/gossip/Fireflies.java @@ -6,9 +6,9 @@ */ package com.salesforce.apollo.fireflies.comm.gossip; -import com.salesfoce.apollo.fireflies.proto.Gossip; -import com.salesfoce.apollo.fireflies.proto.SayWhat; -import com.salesfoce.apollo.fireflies.proto.State; +import com.salesforce.apollo.fireflies.proto.Gossip; +import com.salesforce.apollo.fireflies.proto.SayWhat; +import com.salesforce.apollo.fireflies.proto.State; import com.salesforce.apollo.archipelago.Link; import com.salesforce.apollo.fireflies.View.Node; import com.salesforce.apollo.membership.Member; diff --git a/fireflies/src/test/java/com/salesforce/apollo/fireflies/ChurnTest.java b/fireflies/src/test/java/com/salesforce/apollo/fireflies/ChurnTest.java index 4f49ded588..0d1cc40769 100644 --- a/fireflies/src/test/java/com/salesforce/apollo/fireflies/ChurnTest.java +++ 
b/fireflies/src/test/java/com/salesforce/apollo/fireflies/ChurnTest.java @@ -18,9 +18,7 @@ import com.salesforce.apollo.fireflies.View.Seed; import com.salesforce.apollo.membership.Context; import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; -import com.salesforce.apollo.stereotomy.ControlledIdentifier; -import com.salesforce.apollo.stereotomy.EventValidation; -import com.salesforce.apollo.stereotomy.StereotomyImpl; +import com.salesforce.apollo.stereotomy.*; import com.salesforce.apollo.stereotomy.identifier.SelfAddressingIdentifier; import com.salesforce.apollo.stereotomy.mem.MemKERL; import com.salesforce.apollo.stereotomy.mem.MemKeyStore; @@ -34,7 +32,6 @@ import java.time.Duration; import java.util.*; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; @@ -51,6 +48,7 @@ public class ChurnTest { private static final int CARDINALITY = 100; private static final double P_BYZ = 0.3; private static Map> identities; + private static KERL.AppendKERL kerl; private List communications = new ArrayList<>(); private List gateways = new ArrayList<>(); private Map members; @@ -62,7 +60,8 @@ public class ChurnTest { public static void beforeClass() throws Exception { var entropy = SecureRandom.getInstance("SHA1PRNG"); entropy.setSeed(new byte[] { 6, 6, 6 }); - var stereotomy = new StereotomyImpl(new MemKeyStore(), new MemKERL(DigestAlgorithm.DEFAULT), entropy); + kerl = new MemKERL(DigestAlgorithm.DEFAULT).cached(); + var stereotomy = new StereotomyImpl(new MemKeyStore(), kerl, entropy); identities = IntStream.range(0, CARDINALITY) .mapToObj(i -> { return stereotomy.newIdentifier(); @@ -97,7 +96,7 @@ public void churn() throws Exception { var seeds = members.values() .stream() - .map(m -> new Seed(m.getEvent().getCoordinates(), new InetSocketAddress(0))) + .map(m -> new Seed(m.getEvent(), new InetSocketAddress(0))) .limit(25) .toList(); @@ -109,9 +108,7 @@ public void churn() throws Exception { var countdown = new AtomicReference<>(new CountDownLatch(1)); long then = System.currentTimeMillis(); - views.get(0) - .start(() -> countdown.get().countDown(), gossipDuration, Collections.emptyList(), - Executors.newScheduledThreadPool(1, Thread.ofVirtual().factory())); + views.get(0).start(() -> countdown.get().countDown(), gossipDuration, Collections.emptyList()); assertTrue(countdown.get().await(30, TimeUnit.SECONDS), "Kernel did not bootstrap"); @@ -120,8 +117,7 @@ public void churn() throws Exception { var bootstrappers = views.subList(1, seeds.size()); countdown.set(new CountDownLatch(bootstrappers.size())); - bootstrappers.forEach(v -> v.start(() -> countdown.get().countDown(), gossipDuration, bootstrapSeed, - Executors.newScheduledThreadPool(1, Thread.ofVirtual().factory()))); + bootstrappers.forEach(v -> v.start(() -> countdown.get().countDown(), gossipDuration, bootstrapSeed)); // Test that all seeds up var success = countdown.get().await(30, TimeUnit.SECONDS); @@ -137,7 +133,7 @@ public void churn() throws Exception { "Seeds have stabilized in " + (System.currentTimeMillis() - then) + " Ms across all " + testViews.size() + " members"); - // Bring up the remaining members step wise + // Bring up the remaining members stepwise for (int i = 0; i < 3; i++) { int start = testViews.size(); var toStart = new ArrayList(); @@ -149,8 +145,7 @@ public void churn() throws Exception { then = 
System.currentTimeMillis(); countdown.set(new CountDownLatch(toStart.size())); - toStart.forEach(view -> view.start(() -> countdown.get().countDown(), gossipDuration, seeds, - Executors.newScheduledThreadPool(1, Thread.ofVirtual().factory()))); + toStart.forEach(view -> view.start(() -> countdown.get().countDown(), gossipDuration, seeds)); success = countdown.get().await(30, TimeUnit.SECONDS); failed = testViews.stream() @@ -222,8 +217,8 @@ public void churn() throws Exception { final var expected = c; // System.out.println("** Removed: " + removed); then = System.currentTimeMillis(); - success = Utils.waitForCondition(30_000, 1_000, () -> { - return expected.stream().filter(view -> view.getContext().totalCount() > expected.size()).count() < 3; + success = Utils.waitForCondition(60_000, 1_000, () -> { + return expected.stream().filter(view -> view.getContext().totalCount() > expected.size()).count() == 0; }); failed = expected.stream() .filter(e -> e.getContext().activeCount() != testViews.size()) @@ -259,7 +254,7 @@ public void churn() throws Exception { } private void initialize() { - var parameters = Parameters.newBuilder().build(); + var parameters = Parameters.newBuilder().setMaximumTxfr(20).build(); registry = new MetricRegistry(); node0Registry = new MetricRegistry(); @@ -294,8 +289,8 @@ private void initialize() { gateway.start(); gateways.add(comms); - return new View(context, node, new InetSocketAddress(0), EventValidation.NONE, comms, parameters, gateway, - DigestAlgorithm.DEFAULT, metrics); + return new View(context, node, new InetSocketAddress(0), EventValidation.NONE, Verifiers.from(kerl), comms, + parameters, gateway, DigestAlgorithm.DEFAULT, metrics); }).collect(Collectors.toList()); } } diff --git a/fireflies/src/test/java/com/salesforce/apollo/fireflies/E2ETest.java b/fireflies/src/test/java/com/salesforce/apollo/fireflies/E2ETest.java index 5c87e3c7ee..c6845c417b 100644 --- a/fireflies/src/test/java/com/salesforce/apollo/fireflies/E2ETest.java +++ b/fireflies/src/test/java/com/salesforce/apollo/fireflies/E2ETest.java @@ -18,9 +18,7 @@ import com.salesforce.apollo.fireflies.View.Seed; import com.salesforce.apollo.membership.Context; import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; -import com.salesforce.apollo.stereotomy.ControlledIdentifier; -import com.salesforce.apollo.stereotomy.EventValidation; -import com.salesforce.apollo.stereotomy.StereotomyImpl; +import com.salesforce.apollo.stereotomy.*; import com.salesforce.apollo.stereotomy.identifier.SelfAddressingIdentifier; import com.salesforce.apollo.stereotomy.mem.MemKERL; import com.salesforce.apollo.stereotomy.mem.MemKeyStore; @@ -34,7 +32,6 @@ import java.time.Duration; import java.util.*; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; @@ -55,9 +52,10 @@ public class E2ETest { private static Map> identities; private static boolean largeTests = Boolean.getBoolean( "large_tests"); + private static KERL.AppendKERL kerl; static { - CARDINALITY = largeTests ? 30 : 10; + CARDINALITY = largeTests ? 
30 : 12; } private List communications = new ArrayList<>(); @@ -71,7 +69,8 @@ public class E2ETest { public static void beforeClass() throws Exception { var entropy = SecureRandom.getInstance("SHA1PRNG"); entropy.setSeed(new byte[] { 6, 6, 6 }); - var stereotomy = new StereotomyImpl(new MemKeyStore(), new MemKERL(DigestAlgorithm.DEFAULT), entropy); + kerl = new MemKERL(DigestAlgorithm.DEFAULT); + var stereotomy = new StereotomyImpl(new MemKeyStore(), kerl, entropy); identities = IntStream.range(0, CARDINALITY) .mapToObj(i -> { return stereotomy.newIdentifier(); @@ -103,25 +102,22 @@ public void smokin() throws Exception { final var seeds = members.values() .stream() - .map(m -> new Seed(m.getEvent().getCoordinates(), new InetSocketAddress(0))) - .limit(largeTests ? 100 : 10) + .map(m -> new Seed(m.getEvent(), new InetSocketAddress(0))) + .limit(largeTests ? 10 : 1) .toList(); final var bootstrapSeed = seeds.subList(0, 1); final var gossipDuration = Duration.ofMillis(largeTests ? 70 : 5); var countdown = new AtomicReference<>(new CountDownLatch(1)); - views.get(0) - .start(() -> countdown.get().countDown(), gossipDuration, Collections.emptyList(), - Executors.newScheduledThreadPool(2, Thread.ofVirtual().factory())); + views.get(0).start(() -> countdown.get().countDown(), gossipDuration, Collections.emptyList()); assertTrue(countdown.get().await(largeTests ? 2400 : 30, TimeUnit.SECONDS), "Kernel did not bootstrap"); var bootstrappers = views.subList(0, seeds.size()); countdown.set(new CountDownLatch(seeds.size() - 1)); bootstrappers.subList(1, bootstrappers.size()) - .forEach(v -> v.start(() -> countdown.get().countDown(), gossipDuration, bootstrapSeed, - Executors.newScheduledThreadPool(2, Thread.ofVirtual().factory()))); + .forEach(v -> v.start(() -> countdown.get().countDown(), gossipDuration, bootstrapSeed)); // Test that all bootstrappers up var success = countdown.get().await(largeTests ? 2400 : 30, TimeUnit.SECONDS); @@ -134,8 +130,7 @@ public void smokin() throws Exception { // Start remaining views countdown.set(new CountDownLatch(views.size() - seeds.size())); - views.forEach(v -> v.start(() -> countdown.get().countDown(), gossipDuration, seeds, - Executors.newScheduledThreadPool(2, Thread.ofVirtual().factory()))); + views.forEach(v -> v.start(() -> countdown.get().countDown(), gossipDuration, seeds)); success = countdown.get().await(largeTests ? 2400 : 30, TimeUnit.SECONDS); @@ -158,7 +153,7 @@ public void smokin() throws Exception { .map(v -> String.format("%s : %s : %s ", v.getNode().getId(), v.getContext().activeCount(), v.getContext().totalCount())) .toList(); - assertTrue(success, + assertTrue(success || failed.isEmpty(), "Views did not stabilize, expected: " + views.size() + " failed: " + failed.size() + " views: " + failed); @@ -173,10 +168,7 @@ public void smokin() throws Exception { } private void initialize() { - var parameters = Parameters.newBuilder() - .setMaxPending(largeTests ? 10 : 10) - .setMaximumTxfr(largeTests ? 
100 : 20) - .build(); + var parameters = Parameters.newBuilder().setMaxPending(5).setMaximumTxfr(5).build(); registry = new MetricRegistry(); node0Registry = new MetricRegistry(); @@ -211,8 +203,8 @@ private void initialize() { gateway.start(); gateways.add(comms); - return new View(context, node, new InetSocketAddress(0), EventValidation.NONE, comms, parameters, gateway, - DigestAlgorithm.DEFAULT, metrics); + return new View(context, node, new InetSocketAddress(0), EventValidation.NONE, Verifiers.from(kerl), comms, + parameters, gateway, DigestAlgorithm.DEFAULT, metrics); }).collect(Collectors.toList()); } diff --git a/fireflies/src/test/java/com/salesforce/apollo/fireflies/MtlsTest.java b/fireflies/src/test/java/com/salesforce/apollo/fireflies/MtlsTest.java index 413406bc8d..2d9a30989a 100644 --- a/fireflies/src/test/java/com/salesforce/apollo/fireflies/MtlsTest.java +++ b/fireflies/src/test/java/com/salesforce/apollo/fireflies/MtlsTest.java @@ -21,10 +21,7 @@ import com.salesforce.apollo.membership.Context; import com.salesforce.apollo.membership.Member; import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; -import com.salesforce.apollo.stereotomy.ControlledIdentifier; -import com.salesforce.apollo.stereotomy.EventValidation; -import com.salesforce.apollo.stereotomy.Stereotomy; -import com.salesforce.apollo.stereotomy.StereotomyImpl; +import com.salesforce.apollo.stereotomy.*; import com.salesforce.apollo.stereotomy.identifier.SelfAddressingIdentifier; import com.salesforce.apollo.stereotomy.mem.MemKERL; import com.salesforce.apollo.stereotomy.mem.MemKeyStore; @@ -45,7 +42,6 @@ import java.time.Instant; import java.util.*; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; @@ -69,7 +65,7 @@ public class MtlsTest { private static Map> identities; static { - CARDINALITY = LARGE_TESTS ? 100 : 10; + CARDINALITY = LARGE_TESTS ? 20 : 10; } private List communications = new ArrayList<>(); @@ -106,7 +102,7 @@ public void after() { @Test public void smoke() throws Exception { - var parameters = Parameters.newBuilder().build(); + var parameters = Parameters.newBuilder().setMaximumTxfr(20).build(); final Duration duration = Duration.ofMillis(50); var registry = new MetricRegistry(); var node0Registry = new MetricRegistry(); @@ -115,7 +111,7 @@ public void smoke() throws Exception { var ctxBuilder = Context.newBuilder().setCardinality(CARDINALITY); var seeds = members.stream() - .map(m -> new Seed(m.getEvent().getCoordinates(), endpoints.get(m.getId()))) + .map(m -> new Seed(m.getEvent(), endpoints.get(m.getId()))) .limit(LARGE_TESTS ? 
24 : 3) .toList(); @@ -135,8 +131,8 @@ public void smoke() throws Exception { Router comms = new MtlsServer(node, ep, clientContextSupplier, serverContextSupplier(certWithKey)).router( builder); communications.add(comms); - return new View(context, node, endpoints.get(node.getId()), EventValidation.NONE, comms, parameters, - DigestAlgorithm.DEFAULT, metrics); + return new View(context, node, endpoints.get(node.getId()), EventValidation.NONE, Verifiers.NONE, comms, + parameters, DigestAlgorithm.DEFAULT, metrics); }).collect(Collectors.toList()); var then = System.currentTimeMillis(); @@ -144,9 +140,7 @@ public void smoke() throws Exception { var countdown = new AtomicReference<>(new CountDownLatch(1)); - views.get(0) - .start(() -> countdown.get().countDown(), duration, Collections.emptyList(), - Executors.newScheduledThreadPool(2, Thread.ofVirtual().factory())); + views.get(0).start(() -> countdown.get().countDown(), duration, Collections.emptyList()); assertTrue(countdown.get().await(30, TimeUnit.SECONDS), "KERNEL did not stabilize"); @@ -155,14 +149,12 @@ public void smoke() throws Exception { countdown.set(new CountDownLatch(seedlings.size())); - seedlings.forEach(view -> view.start(() -> countdown.get().countDown(), duration, kernel, - Executors.newScheduledThreadPool(2, Thread.ofVirtual().factory()))); + seedlings.forEach(view -> view.start(() -> countdown.get().countDown(), duration, kernel)); assertTrue(countdown.get().await(30, TimeUnit.SECONDS), "Seeds did not stabilize"); countdown.set(new CountDownLatch(views.size() - seeds.size())); - views.forEach(view -> view.start(() -> countdown.get().countDown(), duration, seeds, - Executors.newScheduledThreadPool(2, Thread.ofVirtual().factory()))); + views.forEach(view -> view.start(() -> countdown.get().countDown(), duration, seeds)); assertTrue(Utils.waitForCondition(120_000, 1_000, () -> { return views.stream() diff --git a/fireflies/src/test/java/com/salesforce/apollo/fireflies/SwarmTest.java b/fireflies/src/test/java/com/salesforce/apollo/fireflies/SwarmTest.java index 58375529fa..16ea84559b 100644 --- a/fireflies/src/test/java/com/salesforce/apollo/fireflies/SwarmTest.java +++ b/fireflies/src/test/java/com/salesforce/apollo/fireflies/SwarmTest.java @@ -18,9 +18,7 @@ import com.salesforce.apollo.fireflies.View.Seed; import com.salesforce.apollo.membership.Context; import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; -import com.salesforce.apollo.stereotomy.ControlledIdentifier; -import com.salesforce.apollo.stereotomy.EventValidation; -import com.salesforce.apollo.stereotomy.StereotomyImpl; +import com.salesforce.apollo.stereotomy.*; import com.salesforce.apollo.stereotomy.identifier.SelfAddressingIdentifier; import com.salesforce.apollo.stereotomy.mem.MemKERL; import com.salesforce.apollo.stereotomy.mem.MemKeyStore; @@ -34,7 +32,6 @@ import java.time.Duration; import java.util.*; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; @@ -56,9 +53,10 @@ public class SwarmTest { private static Map> identities; private static boolean largeTests = Boolean.getBoolean( "large_tests"); + private static KERL.AppendKERL kerl; static { - CARDINALITY = largeTests ? 500 : 100; + CARDINALITY = largeTests ? 
100 : 50; } private List communications = new ArrayList<>(); @@ -72,7 +70,8 @@ public class SwarmTest { public static void beforeClass() throws Exception { var entropy = SecureRandom.getInstance("SHA1PRNG"); entropy.setSeed(new byte[] { 6, 6, 6 }); - var stereotomy = new StereotomyImpl(new MemKeyStore(), new MemKERL(DigestAlgorithm.DEFAULT), entropy); + kerl = new MemKERL(DigestAlgorithm.DEFAULT); + var stereotomy = new StereotomyImpl(new MemKeyStore(), kerl, entropy); identities = IntStream.range(0, CARDINALITY) .mapToObj(i -> { return stereotomy.newIdentifier(); @@ -104,7 +103,7 @@ public void swarm() throws Exception { final var seeds = members.values() .stream() - .map(m -> new Seed(m.getEvent().getCoordinates(), new InetSocketAddress(0))) + .map(m -> new Seed(m.getEvent(), new InetSocketAddress(0))) .limit(largeTests ? 100 : 10) .toList(); final var bootstrapSeed = seeds.subList(0, 1); @@ -112,20 +111,17 @@ public void swarm() throws Exception { final var gossipDuration = Duration.ofMillis(largeTests ? 150 : 5); var countdown = new AtomicReference<>(new CountDownLatch(1)); - views.get(0) - .start(() -> countdown.get().countDown(), gossipDuration, Collections.emptyList(), - Executors.newScheduledThreadPool(2, Thread.ofVirtual().factory())); + views.get(0).start(() -> countdown.get().countDown(), gossipDuration, Collections.emptyList()); - assertTrue(countdown.get().await(30, TimeUnit.SECONDS), "Kernel did not bootstrap"); + assertTrue(countdown.get().await(60, TimeUnit.SECONDS), "Kernel did not bootstrap"); var bootstrappers = views.subList(0, seeds.size()); countdown.set(new CountDownLatch(seeds.size() - 1)); bootstrappers.subList(1, bootstrappers.size()) - .forEach(v -> v.start(() -> countdown.get().countDown(), gossipDuration, bootstrapSeed, - Executors.newScheduledThreadPool(2, Thread.ofVirtual().factory()))); + .forEach(v -> v.start(() -> countdown.get().countDown(), gossipDuration, bootstrapSeed)); // Test that all bootstrappers up - var success = countdown.get().await(largeTests ? 2400 : 30, TimeUnit.SECONDS); + var success = countdown.get().await(largeTests ? 2400 : 60, TimeUnit.SECONDS); var failed = bootstrappers.stream() .filter(e -> e.getContext().activeCount() != bootstrappers.size()) .map( @@ -135,21 +131,20 @@ public void swarm() throws Exception { // Start remaining views countdown.set(new CountDownLatch(views.size() - seeds.size())); - views.forEach(v -> v.start(() -> countdown.get().countDown(), gossipDuration, seeds, - Executors.newScheduledThreadPool(2, Thread.ofVirtual().factory()))); + views.forEach(v -> v.start(() -> countdown.get().countDown(), gossipDuration, seeds)); - success = countdown.get().await(largeTests ? 2400 : 30, TimeUnit.SECONDS); + success = countdown.get().await(largeTests ? 2400 : 120, TimeUnit.SECONDS); // Test that all views are up failed = views.stream() .filter(e -> e.getContext().activeCount() != CARDINALITY) - .map(v -> String.format("%s : %s : %s ", v.getNode().getId(), v.getContext().activeCount(), - v.getContext().totalCount())) + .map(v -> String.format("%s : %s : %s : %s ", v.getNode().getId(), v.getContext().cardinality(), + v.getContext().activeCount(), v.getContext().totalCount())) .toList(); assertTrue(success, "Views did not start, expected: " + views.size() + " failed: " + failed.size() + " views: " + failed); - success = Utils.waitForCondition(largeTests ? 2400_000 : 30, 1_000, () -> { + success = Utils.waitForCondition(largeTests ? 
2400_000 : 120_000, 1_000, () -> { return views.stream().filter(view -> view.getContext().activeCount() != CARDINALITY).count() == 0; }); @@ -243,8 +238,8 @@ private void initialize() { gateway.start(); gateways.add(comms); - return new View(context, node, new InetSocketAddress(0), EventValidation.NONE, comms, parameters, gateway, - DigestAlgorithm.DEFAULT, metrics); + return new View(context, node, new InetSocketAddress(0), EventValidation.NONE, Verifiers.from(kerl), comms, + parameters, gateway, DigestAlgorithm.DEFAULT, metrics); }).collect(Collectors.toList()); } } diff --git a/fireflies/src/test/resources/logback-test.xml b/fireflies/src/test/resources/logback-test.xml index 4791e0131b..2cc64f72bd 100644 --- a/fireflies/src/test/resources/logback-test.xml +++ b/fireflies/src/test/resources/logback-test.xml @@ -2,11 +2,11 @@ - + - - %msg%n + %d{mm:ss.SSS} [%thread] %-5level %logger{0} - %msg%n @@ -14,39 +14,50 @@ ff.log false - - %d{mm:ss.SSS} - %msg%n - + %d{mm:ss.SSS} [%thread] %-5level %logger{0} - %msg%n - + - - + + - - + + + - - + + + + + + - + + + + + + + + + - - + + - - + + diff --git a/gorgoneion-client/src/main/java/com/salesforce/apollo/gorgoneion/client/GorgoneionClient.java b/gorgoneion-client/src/main/java/com/salesforce/apollo/gorgoneion/client/GorgoneionClient.java index 834e8c4079..dad0f313f6 100644 --- a/gorgoneion-client/src/main/java/com/salesforce/apollo/gorgoneion/client/GorgoneionClient.java +++ b/gorgoneion-client/src/main/java/com/salesforce/apollo/gorgoneion/client/GorgoneionClient.java @@ -8,12 +8,12 @@ import com.google.protobuf.Any; import com.google.protobuf.Timestamp; -import com.salesfoce.apollo.gorgoneion.proto.Attestation; -import com.salesfoce.apollo.gorgoneion.proto.Credentials; -import com.salesfoce.apollo.gorgoneion.proto.SignedAttestation; -import com.salesfoce.apollo.gorgoneion.proto.SignedNonce; -import com.salesfoce.apollo.stereotomy.event.proto.KERL_; -import com.salesfoce.apollo.stereotomy.event.proto.Validations; +import com.salesforce.apollo.gorgoneion.proto.Attestation; +import com.salesforce.apollo.gorgoneion.proto.Credentials; +import com.salesforce.apollo.gorgoneion.proto.SignedAttestation; +import com.salesforce.apollo.gorgoneion.proto.SignedNonce; +import com.salesforce.apollo.stereotomy.event.proto.KERL_; +import com.salesforce.apollo.stereotomy.event.proto.Validations; import com.salesforce.apollo.gorgoneion.client.client.comm.Admissions; import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; import org.slf4j.Logger; @@ -30,12 +30,12 @@ public class GorgoneionClient { private static final Logger log = LoggerFactory.getLogger(GorgoneionClient.class); private final Function attester; - private final Admissions client; - private final Clock clock; + private final Admissions client; + private final Clock clock; private final ControlledIdentifierMember member; - public GorgoneionClient(ControlledIdentifierMember member, Function attester, - Clock clock, Admissions client) { + public GorgoneionClient(ControlledIdentifierMember member, Function attester, Clock clock, + Admissions client) { this.member = member; this.attester = attester; this.clock = clock; @@ -53,17 +53,16 @@ private SignedAttestation attestation(SignedNonce nonce, Any proof) { KERL_ kerl = member.kerl(); var now = clock.instant(); var attestation = Attestation.newBuilder() - .setAttestation(proof) - .setKerl(kerl) - .setNonce(member.sign(nonce.toByteString()).toSig()) - .setTimestamp(Timestamp.newBuilder() - .setSeconds(now.getEpochSecond()) - 
.setNanos(now.getNano())) - .build(); + .setAttestation(proof) + .setKerl(kerl) + .setNonce(member.sign(nonce.toByteString()).toSig()) + .setTimestamp( + Timestamp.newBuilder().setSeconds(now.getEpochSecond()).setNanos(now.getNano())) + .build(); return SignedAttestation.newBuilder() - .setAttestation(attestation) - .setSignature(member.sign(attestation.toByteString()).toSig()) - .build(); + .setAttestation(attestation) + .setSignature(member.sign(attestation.toByteString()).toSig()) + .build(); } @@ -71,9 +70,6 @@ private Credentials credentials(SignedNonce nonce) { KERL_ kerl = member.kerl(); var attestation = attester.apply(nonce); var sa = attestation(nonce, attestation); - return Credentials.newBuilder() - .setNonce(nonce) - .setAttestation(sa) - .build(); + return Credentials.newBuilder().setNonce(nonce).setAttestation(sa).build(); } } diff --git a/gorgoneion-client/src/main/java/com/salesforce/apollo/gorgoneion/client/client/comm/Admissions.java b/gorgoneion-client/src/main/java/com/salesforce/apollo/gorgoneion/client/client/comm/Admissions.java index 1918e6f02a..7be8ac5fde 100644 --- a/gorgoneion-client/src/main/java/com/salesforce/apollo/gorgoneion/client/client/comm/Admissions.java +++ b/gorgoneion-client/src/main/java/com/salesforce/apollo/gorgoneion/client/client/comm/Admissions.java @@ -6,10 +6,10 @@ */ package com.salesforce.apollo.gorgoneion.client.client.comm; -import com.salesfoce.apollo.gorgoneion.proto.Credentials; -import com.salesfoce.apollo.gorgoneion.proto.SignedNonce; -import com.salesfoce.apollo.stereotomy.event.proto.KERL_; -import com.salesfoce.apollo.stereotomy.event.proto.Validations; +import com.salesforce.apollo.gorgoneion.proto.Credentials; +import com.salesforce.apollo.gorgoneion.proto.SignedNonce; +import com.salesforce.apollo.stereotomy.event.proto.KERL_; +import com.salesforce.apollo.stereotomy.event.proto.Validations; import com.salesforce.apollo.archipelago.Link; import com.salesforce.apollo.membership.Member; diff --git a/gorgoneion-client/src/main/java/com/salesforce/apollo/gorgoneion/client/client/comm/AdmissionsClient.java b/gorgoneion-client/src/main/java/com/salesforce/apollo/gorgoneion/client/client/comm/AdmissionsClient.java index d31872163e..610698cce6 100644 --- a/gorgoneion-client/src/main/java/com/salesforce/apollo/gorgoneion/client/client/comm/AdmissionsClient.java +++ b/gorgoneion-client/src/main/java/com/salesforce/apollo/gorgoneion/client/client/comm/AdmissionsClient.java @@ -6,11 +6,11 @@ */ package com.salesforce.apollo.gorgoneion.client.client.comm; -import com.salesfoce.apollo.gorgoneion.proto.AdmissionsGrpc; -import com.salesfoce.apollo.gorgoneion.proto.Credentials; -import com.salesfoce.apollo.gorgoneion.proto.SignedNonce; -import com.salesfoce.apollo.stereotomy.event.proto.KERL_; -import com.salesfoce.apollo.stereotomy.event.proto.Validations; +import com.salesforce.apollo.gorgoneion.proto.AdmissionsGrpc; +import com.salesforce.apollo.gorgoneion.proto.Credentials; +import com.salesforce.apollo.gorgoneion.proto.SignedNonce; +import com.salesforce.apollo.stereotomy.event.proto.KERL_; +import com.salesforce.apollo.stereotomy.event.proto.Validations; import com.salesforce.apollo.archipelago.ManagedServerChannel; import com.salesforce.apollo.archipelago.ServerConnectionCache.CreateClientCommunications; import com.salesforce.apollo.membership.Member; diff --git a/gorgoneion-client/src/test/java/com/salesforce/apollo/gorgoneion/client/GorgoneionClientTest.java 
b/gorgoneion-client/src/test/java/com/salesforce/apollo/gorgoneion/client/GorgoneionClientTest.java index b0ee93f65e..7a47198981 100644 --- a/gorgoneion-client/src/test/java/com/salesforce/apollo/gorgoneion/client/GorgoneionClientTest.java +++ b/gorgoneion-client/src/test/java/com/salesforce/apollo/gorgoneion/client/GorgoneionClientTest.java @@ -7,9 +7,8 @@ package com.salesforce.apollo.gorgoneion.client; import com.google.protobuf.Any; -import com.salesfoce.apollo.gorgoneion.proto.SignedNonce; -import com.salesfoce.apollo.stereotomy.event.proto.Validations; import com.salesforce.apollo.archipelago.LocalServer; +import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.archipelago.ServerConnectionCache; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.gorgoneion.Gorgoneion; @@ -18,13 +17,16 @@ import com.salesforce.apollo.gorgoneion.client.client.comm.AdmissionsClient; import com.salesforce.apollo.gorgoneion.comm.admissions.AdmissionsServer; import com.salesforce.apollo.gorgoneion.comm.admissions.AdmissionsService; +import com.salesforce.apollo.gorgoneion.proto.SignedNonce; import com.salesforce.apollo.membership.Context; import com.salesforce.apollo.membership.Member; import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; import com.salesforce.apollo.stereotomy.StereotomyImpl; +import com.salesforce.apollo.stereotomy.event.proto.Validations; import com.salesforce.apollo.stereotomy.mem.MemKERL; import com.salesforce.apollo.stereotomy.mem.MemKeyStore; import com.salesforce.apollo.stereotomy.services.proto.ProtoEventObserver; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; @@ -40,17 +42,21 @@ import java.util.stream.IntStream; import static org.junit.jupiter.api.Assertions.*; -import static org.mockito.Mockito.*; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; /** * @author hal.hildebrand */ public class GorgoneionClientTest { + private Router gorgonRouter; + private Router clientRouter; + @Test public void clientSmoke() throws Exception { var entropy = SecureRandom.getInstance("SHA1PRNG"); - entropy.setSeed(new byte[]{6, 6, 6}); + entropy.setSeed(new byte[] { 6, 6, 6 }); final var kerl = new MemKERL(DigestAlgorithm.DEFAULT); var stereotomy = new StereotomyImpl(new MemKeyStore(), kerl, entropy); final var prefix = UUID.randomUUID().toString(); @@ -59,9 +65,7 @@ public void clientSmoke() throws Exception { context.activate(member); // Gorgoneion service comms - var gorgonRouter = new LocalServer(prefix, member) - .router(ServerConnectionCache.newBuilder() - .setTarget(2)); + gorgonRouter = new LocalServer(prefix, member).router(ServerConnectionCache.newBuilder().setTarget(2)); gorgonRouter.start(); // The kerl observer to publish admitted client KERLs to @@ -69,20 +73,19 @@ public void clientSmoke() throws Exception { final var parameters = Parameters.newBuilder().setKerl(kerl).build(); @SuppressWarnings("unused") var gorgon = new Gorgoneion(parameters, member, context, observer, gorgonRouter, - Executors.newScheduledThreadPool(1, Thread.ofVirtual().factory()), null); + Executors.newScheduledThreadPool(1, Thread.ofVirtual().factory()), null); // The registering client var client = new ControlledIdentifierMember(stereotomy.newIdentifier()); // Registering client comms - var clientRouter = new LocalServer(prefix, 
client).router(ServerConnectionCache.newBuilder().setTarget(2) - ); + clientRouter = new LocalServer(prefix, client).router(ServerConnectionCache.newBuilder().setTarget(2)); var admissions = mock(AdmissionsService.class); var clientComminications = clientRouter.create(client, context.getId(), admissions, ":admissions", - r -> new AdmissionsServer(clientRouter.getClientIdentityProvider(), - r, null), - AdmissionsClient.getCreate(null), - Admissions.getLocalLoopback(client)); + r -> new AdmissionsServer( + clientRouter.getClientIdentityProvider(), r, null), + AdmissionsClient.getCreate(null), + Admissions.getLocalLoopback(client)); clientRouter.start(); // Admin client link @@ -105,17 +108,29 @@ public void clientSmoke() throws Exception { // Verify client KERL published // Because this is a minimal test, the notarization is not published -// verify(observer, times(3)).publish(client.kerl().get(), Collections.singletonList(invitation)); + // verify(observer, times(3)).publish(client.kerl().get(), Collections.singletonList(invitation)); + } + + @AfterEach + public void closeRouters() { + if (gorgonRouter != null) { + gorgonRouter.close(Duration.ofSeconds(3)); + } + if (clientRouter != null) { + clientRouter.close(Duration.ofSeconds(3)); + } } @Test public void multiSmoke() throws Exception { var entropy = SecureRandom.getInstance("SHA1PRNG"); - entropy.setSeed(new byte[]{6, 6, 6}); + entropy.setSeed(new byte[] { 6, 6, 6 }); final var kerl = new MemKERL(DigestAlgorithm.DEFAULT); var stereotomy = new StereotomyImpl(new MemKeyStore(), kerl, entropy); final var prefix = UUID.randomUUID().toString(); - final var members = IntStream.range(0, 10).mapToObj(i -> new ControlledIdentifierMember(stereotomy.newIdentifier())).toList(); + final var members = IntStream.range(0, 10) + .mapToObj(i -> new ControlledIdentifierMember(stereotomy.newIdentifier())) + .toList(); var countdown = new CountDownLatch(3); // The kerl observer to publish admitted client KERLs to @@ -132,33 +147,31 @@ public Void answer(InvocationOnMock invocation) { context.activate(member); } final var parameters = Parameters.newBuilder().setKerl(kerl).build(); - final var exec = Executors.newVirtualThreadPerTaskExecutor(); - members.stream().map(m -> { - final var router = new LocalServer(prefix, m).router(ServerConnectionCache.newBuilder().setTarget(2) - ); - router.start(); - return router; - }) - .map(r -> new Gorgoneion(parameters, (ControlledIdentifierMember) r.getFrom(), - context, observer, r, - Executors.newScheduledThreadPool(2, - Thread.ofVirtual() - .factory()), - null)) - .toList(); + final var gorgoneions = members.stream() + .map(m -> { + final var router = new LocalServer(prefix, m).router( + ServerConnectionCache.newBuilder().setTarget(2)); + router.start(); + return router; + }) + .map(r -> new Gorgoneion(parameters, (ControlledIdentifierMember) r.getFrom(), + context, observer, r, + Executors.newScheduledThreadPool(2, Thread.ofVirtual() + .factory()), + null)) + .toList(); // The registering client var client = new ControlledIdentifierMember(stereotomy.newIdentifier()); // Registering client comms - var clientRouter = new LocalServer(prefix, client).router(ServerConnectionCache.newBuilder().setTarget(2) - ); + var clientRouter = new LocalServer(prefix, client).router(ServerConnectionCache.newBuilder().setTarget(2)); AdmissionsService admissions = mock(AdmissionsService.class); var clientComminications = clientRouter.create(client, context.getId(), admissions, ":admissions", - r -> new 
AdmissionsServer(clientRouter.getClientIdentityProvider(), - r, null), - AdmissionsClient.getCreate(null), - Admissions.getLocalLoopback(client)); + r -> new AdmissionsServer( + clientRouter.getClientIdentityProvider(), r, null), + AdmissionsClient.getCreate(null), + Admissions.getLocalLoopback(client)); clientRouter.start(); // Admin client link diff --git a/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/Gorgoneion.java b/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/Gorgoneion.java index 6f9e4675ef..3d8245df60 100644 --- a/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/Gorgoneion.java +++ b/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/Gorgoneion.java @@ -9,12 +9,12 @@ import com.codahale.metrics.Timer; import com.google.protobuf.Empty; import com.google.protobuf.Timestamp; -import com.salesfoce.apollo.gorgoneion.proto.*; -import com.salesfoce.apollo.stereotomy.event.proto.Ident; -import com.salesfoce.apollo.stereotomy.event.proto.KERL_; -import com.salesfoce.apollo.stereotomy.event.proto.Validation_; -import com.salesfoce.apollo.stereotomy.event.proto.Validations; -import com.salesfoce.apollo.cryptography.proto.Digeste; +import com.salesforce.apollo.gorgoneion.proto.*; +import com.salesforce.apollo.stereotomy.event.proto.Ident; +import com.salesforce.apollo.stereotomy.event.proto.KERL_; +import com.salesforce.apollo.stereotomy.event.proto.Validation_; +import com.salesforce.apollo.stereotomy.event.proto.Validations; +import com.salesforce.apollo.cryptography.proto.Digeste; import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.archipelago.RouterImpl.CommonCommunications; import com.salesforce.apollo.cryptography.Digest; diff --git a/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/Parameters.java b/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/Parameters.java index 7ddacece77..0419f4c507 100644 --- a/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/Parameters.java +++ b/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/Parameters.java @@ -6,7 +6,7 @@ */ package com.salesforce.apollo.gorgoneion; -import com.salesfoce.apollo.gorgoneion.proto.SignedAttestation; +import com.salesforce.apollo.gorgoneion.proto.SignedAttestation; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.stereotomy.KERL; @@ -17,9 +17,8 @@ /** * @author hal.hildebrand */ -public record Parameters(Predicate verifier, Clock clock, - Duration registrationTimeout, Duration frequency, DigestAlgorithm digestAlgorithm, - Duration maxDuration, KERL kerl) { +public record Parameters(Predicate verifier, Clock clock, Duration registrationTimeout, + Duration frequency, DigestAlgorithm digestAlgorithm, Duration maxDuration, KERL kerl) { public static Builder newBuilder() { return new Builder(); @@ -32,12 +31,12 @@ public static class Builder { defaultVerifier = x -> true; } - private Clock clock = Clock.systemUTC(); - private DigestAlgorithm digestAlgorithm = DigestAlgorithm.DEFAULT; - private Duration frequency = Duration.ofMillis(5); - private KERL kerl; - private Duration maxDuration = Duration.ofSeconds(30); - private Duration registrationTimeout = Duration.ofSeconds(30); + private Clock clock = Clock.systemUTC(); + private DigestAlgorithm digestAlgorithm = DigestAlgorithm.DEFAULT; + private Duration frequency = Duration.ofMillis(5); + private KERL kerl; + private Duration maxDuration = Duration.ofSeconds(30); + private Duration registrationTimeout = Duration.ofSeconds(30); private 
Predicate verifier = defaultVerifier; diff --git a/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/comm/admissions/AdmissionsServer.java b/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/comm/admissions/AdmissionsServer.java index 3537dfe9a4..693185c922 100644 --- a/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/comm/admissions/AdmissionsServer.java +++ b/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/comm/admissions/AdmissionsServer.java @@ -6,11 +6,11 @@ */ package com.salesforce.apollo.gorgoneion.comm.admissions; -import com.salesfoce.apollo.gorgoneion.proto.AdmissionsGrpc.AdmissionsImplBase; -import com.salesfoce.apollo.gorgoneion.proto.Credentials; -import com.salesfoce.apollo.gorgoneion.proto.SignedNonce; -import com.salesfoce.apollo.stereotomy.event.proto.KERL_; -import com.salesfoce.apollo.stereotomy.event.proto.Validations; +import com.salesforce.apollo.gorgoneion.proto.AdmissionsGrpc.AdmissionsImplBase; +import com.salesforce.apollo.gorgoneion.proto.Credentials; +import com.salesforce.apollo.gorgoneion.proto.SignedNonce; +import com.salesforce.apollo.stereotomy.event.proto.KERL_; +import com.salesforce.apollo.stereotomy.event.proto.Validations; import com.salesforce.apollo.archipelago.RoutableService; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.gorgoneion.comm.GorgoneionMetrics; @@ -20,7 +20,6 @@ /** * @author hal.hildebrand - * */ public class AdmissionsServer extends AdmissionsImplBase { diff --git a/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/comm/admissions/AdmissionsService.java b/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/comm/admissions/AdmissionsService.java index 154d83c79c..4031e3e0c9 100644 --- a/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/comm/admissions/AdmissionsService.java +++ b/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/comm/admissions/AdmissionsService.java @@ -7,17 +7,16 @@ package com.salesforce.apollo.gorgoneion.comm.admissions; import com.codahale.metrics.Timer.Context; -import com.salesfoce.apollo.gorgoneion.proto.Credentials; -import com.salesfoce.apollo.gorgoneion.proto.SignedNonce; -import com.salesfoce.apollo.stereotomy.event.proto.KERL_; -import com.salesfoce.apollo.stereotomy.event.proto.Validations; +import com.salesforce.apollo.gorgoneion.proto.Credentials; +import com.salesforce.apollo.gorgoneion.proto.SignedNonce; +import com.salesforce.apollo.stereotomy.event.proto.KERL_; +import com.salesforce.apollo.stereotomy.event.proto.Validations; import com.salesforce.apollo.cryptography.Digest; import io.grpc.stub.StreamObserver; /** * @author hal.hildebrand - * */ public interface AdmissionsService { diff --git a/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/comm/endorsement/Endorsement.java b/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/comm/endorsement/Endorsement.java index 2cdd5a4cd6..24cad162ec 100644 --- a/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/comm/endorsement/Endorsement.java +++ b/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/comm/endorsement/Endorsement.java @@ -8,11 +8,11 @@ import com.google.common.util.concurrent.SettableFuture; import com.google.protobuf.Empty; -import com.salesfoce.apollo.gorgoneion.proto.Credentials; -import com.salesfoce.apollo.gorgoneion.proto.MemberSignature; -import com.salesfoce.apollo.gorgoneion.proto.Nonce; -import com.salesfoce.apollo.gorgoneion.proto.Notarization; -import 
com.salesfoce.apollo.stereotomy.event.proto.Validation_; +import com.salesforce.apollo.gorgoneion.proto.Credentials; +import com.salesforce.apollo.gorgoneion.proto.MemberSignature; +import com.salesforce.apollo.gorgoneion.proto.Nonce; +import com.salesforce.apollo.gorgoneion.proto.Notarization; +import com.salesforce.apollo.stereotomy.event.proto.Validation_; import com.salesforce.apollo.archipelago.Link; import com.salesforce.apollo.membership.Member; diff --git a/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/comm/endorsement/EndorsementClient.java b/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/comm/endorsement/EndorsementClient.java index 04ba9ab397..444b49a8b5 100644 --- a/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/comm/endorsement/EndorsementClient.java +++ b/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/comm/endorsement/EndorsementClient.java @@ -6,8 +6,8 @@ */ package com.salesforce.apollo.gorgoneion.comm.endorsement; -import com.salesfoce.apollo.gorgoneion.proto.*; -import com.salesfoce.apollo.stereotomy.event.proto.Validation_; +import com.salesforce.apollo.gorgoneion.proto.*; +import com.salesforce.apollo.stereotomy.event.proto.Validation_; import com.salesforce.apollo.archipelago.ManagedServerChannel; import com.salesforce.apollo.archipelago.ServerConnectionCache.CreateClientCommunications; import com.salesforce.apollo.gorgoneion.comm.GorgoneionMetrics; @@ -22,9 +22,9 @@ */ public class EndorsementClient implements Endorsement { - private final ManagedServerChannel channel; + private final ManagedServerChannel channel; private final EndorsementGrpc.EndorsementBlockingStub client; - private final GorgoneionMetrics metrics; + private final GorgoneionMetrics metrics; public EndorsementClient(ManagedServerChannel channel, GorgoneionMetrics metrics) { this.channel = channel; @@ -50,8 +50,7 @@ public MemberSignature endorse(Nonce nonce, Duration timeout) { metrics.outboundEndorseNonce().update(serializedSize); } - var result = client.withDeadlineAfter(timeout.toNanos(), TimeUnit.NANOSECONDS) - .endorse(nonce); + var result = client.withDeadlineAfter(timeout.toNanos(), TimeUnit.NANOSECONDS).endorse(nonce); if (metrics != null) { try { var serializedSize = result.getSerializedSize(); @@ -88,8 +87,7 @@ public Validation_ validate(Credentials credentials, Duration timeout) { metrics.outboundValidateCredentials().update(serializedSize); } - var result = client.withDeadlineAfter(timeout.toNanos(), TimeUnit.NANOSECONDS) - .validate(credentials); + var result = client.withDeadlineAfter(timeout.toNanos(), TimeUnit.NANOSECONDS).validate(credentials); if (metrics != null) { try { var serializedSize = result.getSerializedSize(); diff --git a/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/comm/endorsement/EndorsementServer.java b/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/comm/endorsement/EndorsementServer.java index 036125e7c4..731fcf5126 100644 --- a/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/comm/endorsement/EndorsementServer.java +++ b/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/comm/endorsement/EndorsementServer.java @@ -7,12 +7,12 @@ package com.salesforce.apollo.gorgoneion.comm.endorsement; import com.google.protobuf.Empty; -import com.salesfoce.apollo.gorgoneion.proto.Credentials; -import com.salesfoce.apollo.gorgoneion.proto.EndorsementGrpc.EndorsementImplBase; -import com.salesfoce.apollo.gorgoneion.proto.MemberSignature; -import com.salesfoce.apollo.gorgoneion.proto.Nonce; -import 
com.salesfoce.apollo.gorgoneion.proto.Notarization; -import com.salesfoce.apollo.stereotomy.event.proto.Validation_; +import com.salesforce.apollo.gorgoneion.proto.Credentials; +import com.salesforce.apollo.gorgoneion.proto.EndorsementGrpc.EndorsementImplBase; +import com.salesforce.apollo.gorgoneion.proto.MemberSignature; +import com.salesforce.apollo.gorgoneion.proto.Nonce; +import com.salesforce.apollo.gorgoneion.proto.Notarization; +import com.salesforce.apollo.stereotomy.event.proto.Validation_; import com.salesforce.apollo.archipelago.RoutableService; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.gorgoneion.comm.GorgoneionMetrics; @@ -23,8 +23,8 @@ * @author hal.hildebrand */ public class EndorsementServer extends EndorsementImplBase { - private final ClientIdentity identity; - private final GorgoneionMetrics metrics; + private final ClientIdentity identity; + private final GorgoneionMetrics metrics; private final RoutableService router; public EndorsementServer(ClientIdentity identity, RoutableService r, diff --git a/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/comm/endorsement/EndorsementService.java b/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/comm/endorsement/EndorsementService.java index 1dee788f63..e19b32027f 100644 --- a/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/comm/endorsement/EndorsementService.java +++ b/gorgoneion/src/main/java/com/salesforce/apollo/gorgoneion/comm/endorsement/EndorsementService.java @@ -6,11 +6,11 @@ */ package com.salesforce.apollo.gorgoneion.comm.endorsement; -import com.salesfoce.apollo.gorgoneion.proto.Credentials; -import com.salesfoce.apollo.gorgoneion.proto.MemberSignature; -import com.salesfoce.apollo.gorgoneion.proto.Nonce; -import com.salesfoce.apollo.gorgoneion.proto.Notarization; -import com.salesfoce.apollo.stereotomy.event.proto.Validation_; +import com.salesforce.apollo.gorgoneion.proto.Credentials; +import com.salesforce.apollo.gorgoneion.proto.MemberSignature; +import com.salesforce.apollo.gorgoneion.proto.Nonce; +import com.salesforce.apollo.gorgoneion.proto.Notarization; +import com.salesforce.apollo.stereotomy.event.proto.Validation_; import com.salesforce.apollo.cryptography.Digest; /** diff --git a/gorgoneion/src/test/java/com/salesforce/apollo/gorgoneion/Admissions.java b/gorgoneion/src/test/java/com/salesforce/apollo/gorgoneion/Admissions.java index 4fe53974e2..c939b0e937 100644 --- a/gorgoneion/src/test/java/com/salesforce/apollo/gorgoneion/Admissions.java +++ b/gorgoneion/src/test/java/com/salesforce/apollo/gorgoneion/Admissions.java @@ -6,10 +6,10 @@ */ package com.salesforce.apollo.gorgoneion; -import com.salesfoce.apollo.gorgoneion.proto.Credentials; -import com.salesfoce.apollo.gorgoneion.proto.SignedNonce; -import com.salesfoce.apollo.stereotomy.event.proto.KERL_; -import com.salesfoce.apollo.stereotomy.event.proto.Validations; +import com.salesforce.apollo.gorgoneion.proto.Credentials; +import com.salesforce.apollo.gorgoneion.proto.SignedNonce; +import com.salesforce.apollo.stereotomy.event.proto.KERL_; +import com.salesforce.apollo.stereotomy.event.proto.Validations; import com.salesforce.apollo.archipelago.Link; import com.salesforce.apollo.membership.Member; diff --git a/gorgoneion/src/test/java/com/salesforce/apollo/gorgoneion/AdmissionsClient.java b/gorgoneion/src/test/java/com/salesforce/apollo/gorgoneion/AdmissionsClient.java index f1942a186a..781e1ca796 100644 --- 
a/gorgoneion/src/test/java/com/salesforce/apollo/gorgoneion/AdmissionsClient.java +++ b/gorgoneion/src/test/java/com/salesforce/apollo/gorgoneion/AdmissionsClient.java @@ -6,11 +6,11 @@ */ package com.salesforce.apollo.gorgoneion; -import com.salesfoce.apollo.gorgoneion.proto.AdmissionsGrpc; -import com.salesfoce.apollo.gorgoneion.proto.Credentials; -import com.salesfoce.apollo.gorgoneion.proto.SignedNonce; -import com.salesfoce.apollo.stereotomy.event.proto.KERL_; -import com.salesfoce.apollo.stereotomy.event.proto.Validations; +import com.salesforce.apollo.gorgoneion.proto.AdmissionsGrpc; +import com.salesforce.apollo.gorgoneion.proto.Credentials; +import com.salesforce.apollo.gorgoneion.proto.SignedNonce; +import com.salesforce.apollo.stereotomy.event.proto.KERL_; +import com.salesforce.apollo.stereotomy.event.proto.Validations; import com.salesforce.apollo.archipelago.ManagedServerChannel; import com.salesforce.apollo.archipelago.ServerConnectionCache.CreateClientCommunications; import com.salesforce.apollo.membership.Member; @@ -23,8 +23,9 @@ */ public class AdmissionsClient implements Admissions { - private final ManagedServerChannel channel; + private final ManagedServerChannel channel; private final AdmissionsGrpc.AdmissionsBlockingStub client; + public AdmissionsClient(ManagedServerChannel channel) { this.channel = channel; this.client = AdmissionsGrpc.newBlockingStub(channel).withCompression("gzip"); diff --git a/gorgoneion/src/test/java/com/salesforce/apollo/gorgoneion/GorgoneionTest.java b/gorgoneion/src/test/java/com/salesforce/apollo/gorgoneion/GorgoneionTest.java index 724fbcf870..774f39020a 100644 --- a/gorgoneion/src/test/java/com/salesforce/apollo/gorgoneion/GorgoneionTest.java +++ b/gorgoneion/src/test/java/com/salesforce/apollo/gorgoneion/GorgoneionTest.java @@ -8,11 +8,11 @@ import com.google.protobuf.Any; import com.google.protobuf.Timestamp; -import com.salesfoce.apollo.gorgoneion.proto.Attestation; -import com.salesfoce.apollo.gorgoneion.proto.Credentials; -import com.salesfoce.apollo.gorgoneion.proto.SignedAttestation; -import com.salesfoce.apollo.stereotomy.event.proto.KERL_; -import com.salesfoce.apollo.stereotomy.event.proto.Validations; +import com.salesforce.apollo.gorgoneion.proto.Attestation; +import com.salesforce.apollo.gorgoneion.proto.Credentials; +import com.salesforce.apollo.gorgoneion.proto.SignedAttestation; +import com.salesforce.apollo.stereotomy.event.proto.KERL_; +import com.salesforce.apollo.stereotomy.event.proto.Validations; import com.salesforce.apollo.archipelago.LocalServer; import com.salesforce.apollo.archipelago.ServerConnectionCache; import com.salesforce.apollo.cryptography.DigestAlgorithm; @@ -60,8 +60,8 @@ public void smokin() throws Exception { var observer = mock(ProtoEventObserver.class); @SuppressWarnings("unused") var gorgon = new Gorgoneion(Parameters.newBuilder().setKerl(kerl).build(), member, context, observer, - gorgonRouter, - Executors.newScheduledThreadPool(1, Thread.ofVirtual().factory()), null); + gorgonRouter, Executors.newScheduledThreadPool(1, Thread.ofVirtual().factory()), + null); // The registering client var client = new ControlledIdentifierMember(stereotomy.newIdentifier()); diff --git a/grpc/README.md b/grpc/README.md new file mode 100644 index 0000000000..9592609d5b --- /dev/null +++ b/grpc/README.md @@ -0,0 +1,3 @@ +# GRPC/Protobuf + +Apollo protobuf and RPC services diff --git a/grpc/src/main/proto/choam.proto b/grpc/src/main/proto/choam.proto index 18d489be00..76f9aae791 100644 --- 
a/grpc/src/main/proto/choam.proto +++ b/grpc/src/main/proto/choam.proto @@ -1,7 +1,7 @@ syntax = "proto3"; option java_multiple_files = true; -option java_package = "com.salesfoce.apollo.choam.proto"; +option java_package = "com.salesforce.apollo.choam.proto"; option java_outer_classname = "ChoamProto"; option objc_class_prefix = "Chp"; import "crypto.proto"; @@ -88,13 +88,8 @@ message Assemble { crypto.Digeste nextView = 1; } -message Foundation { - repeated crypto.Digeste membership = 1; - stereotomy.KeyEvent_ authority = 2; -} - message FoundationSeal { - Foundation foundation = 1; + stereotomy.KeyEvent_ foundation = 1; crypto.Sig signature = 2; } diff --git a/grpc/src/main/proto/crypto.proto b/grpc/src/main/proto/crypto.proto index 316b7e7d00..8952d917ae 100644 --- a/grpc/src/main/proto/crypto.proto +++ b/grpc/src/main/proto/crypto.proto @@ -1,7 +1,7 @@ syntax = "proto3"; option java_multiple_files = true; -option java_package = "com.salesfoce.apollo.cryptography.proto"; +option java_package = "com.salesforce.apollo.cryptography.proto"; option java_outer_classname = "ApolloCryptoProto"; option objc_class_prefix = "Acr"; import "google/protobuf/any.proto"; diff --git a/grpc/src/main/proto/demesne.proto b/grpc/src/main/proto/demesne.proto index bb54f81dfb..571f9e2801 100644 --- a/grpc/src/main/proto/demesne.proto +++ b/grpc/src/main/proto/demesne.proto @@ -1,7 +1,7 @@ syntax = "proto3"; option java_multiple_files = true; -option java_package = "com.salesfoce.apollo.demesne.proto"; +option java_package = "com.salesforce.apollo.demesne.proto"; option java_outer_classname = "DemesneProto"; option objc_class_prefix = "Dmsp"; @@ -15,54 +15,54 @@ import "crypto.proto"; package apollo.demesne; message DemesneParameters { - string commDirectory = 1; - string portal = 2; - string parent = 3; - crypto.Digeste context = 4; - google.protobuf.Duration keepAlive = 5; - apollo.choam.FoundationSeal foundation = 6; - google.protobuf.Duration timeout = 7; - string loggingConfig = 8; - google.protobuf.Duration gossipInterval = 9; - int32 virtualThreads = 10; - double falsePositiveRate = 11; - int32 maxTransfer = 12; + string commDirectory = 1; + string portal = 2; + string parent = 3; + crypto.Digeste context = 4; + google.protobuf.Duration keepAlive = 5; + apollo.choam.FoundationSeal foundation = 6; + google.protobuf.Duration timeout = 7; + string loggingConfig = 8; + google.protobuf.Duration gossipInterval = 9; + int32 virtualThreads = 10; + double falsePositiveRate = 11; + int32 maxTransfer = 12; } message ViewChange { - crypto.Digeste view = 1; - repeated stereotomy.EventCoords joining = 2; - repeated crypto.Digeste leaving = 3; + crypto.Digeste view = 1; + repeated stereotomy.EventCoords joining = 2; + repeated crypto.Digeste leaving = 3; } service OuterContext { - rpc register (SubContext) returns(google.protobuf.Empty) {} - rpc deregister (crypto.Digeste) returns(google.protobuf.Empty) {} + rpc register (SubContext) returns(google.protobuf.Empty) {} + rpc deregister (crypto.Digeste) returns(google.protobuf.Empty) {} } message SubContext { - crypto.Digeste enclave = 1; - crypto.Digeste context = 2; + crypto.Digeste enclave = 1; + crypto.Digeste context = 2; } message DelegationUpdate { - int32 ring = 1; - crypto.Biff have = 2; - repeated SignedDelegate update = 3; + int32 ring = 1; + crypto.Biff have = 2; + repeated SignedDelegate update = 3; } message Delegate { - crypto.Digeste context = 1; - crypto.Digeste delegator = 2; - crypto.Digeste delegate = 3; + crypto.Digeste context = 1; + crypto.Digeste 
delegator = 2; + crypto.Digeste delegate = 3; } message SignedDelegate { - Delegate delegate = 1; - crypto.Sig signature = 2; + Delegate delegate = 1; + crypto.Sig signature = 2; } service Delegation { - rpc gossip( crypto.Biff ) returns (DelegationUpdate) {} - rpc update( DelegationUpdate ) returns (google.protobuf.Empty) {} + rpc gossip(crypto.Biff) returns (DelegationUpdate) {} + rpc update(DelegationUpdate) returns (google.protobuf.Empty) {} } diff --git a/grpc/src/main/proto/ethereal.proto b/grpc/src/main/proto/ethereal.proto index 21dd680512..da9f52823b 100644 --- a/grpc/src/main/proto/ethereal.proto +++ b/grpc/src/main/proto/ethereal.proto @@ -1,7 +1,7 @@ syntax = "proto3"; option java_multiple_files = true; -option java_package = "com.salesfoce.apollo.ethereal.proto"; +option java_package = "com.salesforce.apollo.ethereal.proto"; option java_outer_classname = "EtherealProto"; option objc_class_prefix = "Ep"; import "google/protobuf/empty.proto"; @@ -10,85 +10,85 @@ import "crypto.proto"; package ethereal; service Gossiper { - rpc gossip (Gossip) returns (Update) {} - rpc update (ContextUpdate) returns (google.protobuf.Empty) {} + rpc gossip (Gossip) returns (Update) {} + rpc update (ContextUpdate) returns (google.protobuf.Empty) {} } message Gossip { - int32 ring = 1; - crypto.Biff have = 2; - repeated Have haves = 3; + int32 ring = 1; + crypto.Biff have = 2; + repeated Have haves = 3; } message Have { - int32 epoch = 1; - crypto.Biff haveUnits = 2; - crypto.Biff havePreVotes = 3; - crypto.Biff haveCommits = 4; + int32 epoch = 1; + crypto.Biff haveUnits = 2; + crypto.Biff havePreVotes = 3; + crypto.Biff haveCommits = 4; } message Missing { - int32 epoch = 1; - repeated PreUnit_s units = 2; - repeated SignedPreVote prevotes = 3; - repeated SignedCommit commits = 4; - Have haves = 5; + int32 epoch = 1; + repeated PreUnit_s units = 2; + repeated SignedPreVote prevotes = 3; + repeated SignedCommit commits = 4; + Have haves = 5; } message Update { - repeated PreUnit_s missing = 1; - repeated PreUnit_s waiting = 2; - crypto.Biff have = 3; - repeated Missing missings = 4; + repeated PreUnit_s missing = 1; + repeated PreUnit_s waiting = 2; + crypto.Biff have = 3; + repeated Missing missings = 4; } -message ContextUpdate { - int32 ring = 1; - Update update = 2; +message ContextUpdate { + int32 ring = 1; + Update update = 2; } message PreVote { - int64 unit = 1; - crypto.Digeste hash = 2; - int32 source = 3; + int64 unit = 1; + crypto.Digeste hash = 2; + int32 source = 3; } message Commit { - int64 unit = 1; - crypto.Digeste hash = 2; - int32 source = 3; + int64 unit = 1; + crypto.Digeste hash = 2; + int32 source = 3; } message SignedPreVote { - crypto.Sig signature = 1; - PreVote vote = 2; + crypto.Sig signature = 1; + PreVote vote = 2; } message SignedCommit { - crypto.Sig signature = 1; - Commit commit = 2; + crypto.Sig signature = 1; + Commit commit = 2; } message EpochProof { - Proof msg = 1; - int32 owner = 2; - crypto.Sig signature = 3; + Proof msg = 1; + int32 owner = 2; + crypto.Sig signature = 3; } message Proof { - int64 encodedId = 1; - crypto.Digeste hash = 2; + int64 encodedId = 1; + crypto.Digeste hash = 2; } message PreUnit_s { - int64 id = 1; - Crown_s crown = 2; - bytes data = 3; - bytes salt = 4; - crypto.Sig signature = 5; + int64 id = 1; + Crown_s crown = 2; + bytes data = 3; + bytes salt = 4; + crypto.Sig signature = 5; } message Crown_s { - repeated int32 heights = 1; - crypto.Digeste controlHash = 2; + repeated int32 heights = 1; + crypto.Digeste controlHash = 2; } 
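The choam.proto hunk above removes the intermediate Foundation message (its membership digest list and authority event), leaving FoundationSeal to carry the stereotomy KeyEvent_ directly. A minimal sketch of building a seal against the renamed com.salesforce packages follows; it assumes the stereotomy event types sit under com.salesforce.apollo.stereotomy.event.proto as in the Java imports shown earlier, and authorityEvent and signature are placeholder values produced elsewhere:

import com.salesforce.apollo.choam.proto.FoundationSeal;
import com.salesforce.apollo.cryptography.proto.Sig;
import com.salesforce.apollo.stereotomy.event.proto.KeyEvent_;

// Hypothetical helper, not part of the patch: illustrates the reshaped FoundationSeal schema.
final class FoundationSealSketch {
    static FoundationSeal seal(KeyEvent_ authorityEvent, Sig signature) {
        return FoundationSeal.newBuilder()
                             .setFoundation(authorityEvent) // formerly wrapped in Foundation{membership, authority}
                             .setSignature(signature)
                             .build();
    }
}

Collapsing Foundation into a single KeyEvent_ appears to tie the seal directly to the establishing KERL event rather than to a separate membership list, matching the bootstrap-verification direction of the rest of the patch.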
diff --git a/grpc/src/main/proto/fireflies.proto b/grpc/src/main/proto/fireflies.proto index 6ca906e68e..23edf780d6 100644 --- a/grpc/src/main/proto/fireflies.proto +++ b/grpc/src/main/proto/fireflies.proto @@ -1,7 +1,7 @@ syntax = "proto3"; option java_multiple_files = true; -option java_package = "com.salesfoce.apollo.fireflies.proto"; +option java_package = "com.salesforce.apollo.fireflies.proto"; option java_outer_classname = "FirefliesProto"; option objc_class_prefix = "Ff"; @@ -13,136 +13,150 @@ import "stereotomy.proto"; package fireflies; service Fireflies { - rpc gossip (SayWhat) returns (Gossip) {} - rpc update (State) returns (google.protobuf.Empty) {} + rpc gossip (SayWhat) returns (Gossip) {} + rpc update (State) returns (google.protobuf.Empty) {} } -message SayWhat { - crypto.Digeste view = 1; - SignedNote note = 2; - int32 ring = 3; - Digests gossip = 4; +message SayWhat { + crypto.Digeste view = 1; + SignedNote note = 2; + int32 ring = 3; + Digests gossip = 4; } -message State { - crypto.Digeste view = 1; - int32 ring = 2; - Update update = 3; +message State { + crypto.Digeste view = 1; + int32 ring = 2; + Update update = 3; } message Accusation { - int64 epoch = 1; - int32 ringNumber = 2; - crypto.Digeste accuser = 3; - crypto.Digeste accused = 4; - crypto.Digeste currentView = 5; + int64 epoch = 1; + int32 ringNumber = 2; + crypto.Digeste accuser = 3; + crypto.Digeste accused = 4; + crypto.Digeste currentView = 5; } message SignedAccusation { - Accusation accusation = 1; - crypto.Sig signature = 2; + Accusation accusation = 1; + crypto.Sig signature = 2; } message Note { - int64 epoch = 1; - crypto.Digeste currentView = 2; - stereotomy.EventCoords coordinates = 3; - bytes mask = 4; - string host = 5; - int32 port = 6; + int64 epoch = 1; + crypto.Digeste currentView = 2; + stereotomy.EventCoords coordinates = 3; + bytes mask = 4; + string host = 5; + int32 port = 6; } message ViewChange { - crypto.Digeste observer = 1; - crypto.Digeste current = 2; - int32 attempt = 3; - repeated crypto.Digeste joins = 4; - repeated crypto.Digeste leaves = 5; + crypto.Digeste observer = 1; + crypto.Digeste current = 2; + int32 attempt = 3; + repeated crypto.Digeste joins = 4; + repeated crypto.Digeste leaves = 5; } message SignedViewChange { - ViewChange change = 1; - crypto.Sig signature = 2; + ViewChange change = 1; + crypto.Sig signature = 2; } message SignedNote { - Note note = 1; - crypto.Sig signature = 2; + Note note = 1; + crypto.Sig signature = 2; } message AccusationGossip { - crypto.Biff bff = 1; - repeated SignedAccusation updates = 2; + crypto.Biff bff = 1; + repeated SignedAccusation updates = 2; } message NoteGossip { - crypto.Biff bff = 1; - repeated SignedNote updates = 2; + crypto.Biff bff = 1; + repeated SignedNote updates = 2; } message JoinGossip { - crypto.Biff bff = 1; - repeated SignedNote updates = 2; + crypto.Biff bff = 1; + repeated SignedNote updates = 2; } message ViewChangeGossip { - crypto.Biff bff = 1; - repeated SignedViewChange updates = 2; + crypto.Biff bff = 1; + repeated SignedViewChange updates = 2; } -message Digests { - crypto.Biff identityBff = 1; - crypto.Biff noteBff = 2; - crypto.Biff accusationBff = 3; - crypto.Biff observationBff = 4; - crypto.Biff joinBiff = 5; +message Digests { + crypto.Biff identityBff = 1; + crypto.Biff noteBff = 2; + crypto.Biff accusationBff = 3; + crypto.Biff observationBff = 4; + crypto.Biff joinBiff = 5; } message Gossip { - SignedNote redirect = 1; - NoteGossip notes = 3; - AccusationGossip accusations = 4; - 
ViewChangeGossip observations = 5; - JoinGossip joins = 6; + SignedNote redirect = 1; + NoteGossip notes = 3; + AccusationGossip accusations = 4; + ViewChangeGossip observations = 5; + JoinGossip joins = 6; } message Update { - repeated SignedNote notes = 1; - repeated SignedAccusation accusations = 2; - repeated SignedViewChange observations = 3; - repeated SignedNote joins = 4; + repeated SignedNote notes = 1; + repeated SignedAccusation accusations = 2; + repeated SignedViewChange observations = 3; + repeated SignedNote joins = 4; } // EĂ­sodos service Entrance { - rpc seed (Registration) returns (Redirect) {} - rpc join (Join) returns (Gateway) {} + rpc seed (Registration) returns (Redirect) {} + rpc join (Join) returns (Gateway) {} + + // Bootstrap validation + rpc validate(stereotomy.EventCoords) returns (Validation) {} + // Bootstrap verification key state + rpc keyState(stereotomy.IdentAndSeq) returns (stereotomy.KeyState_) {} +} + +message Validation { + bool result = 1; } -message Registration { - crypto.Digeste view = 1; - SignedNote note = 2; +message Registration { + crypto.Digeste view = 1; + SignedNote note = 2; } message Redirect { - crypto.Digeste view = 1; - int32 cardinality = 2; - int32 rings = 3; - bool bootstrap = 4; - repeated Seed_ successors = 6; + crypto.Digeste view = 1; + int32 cardinality = 2; + int32 rings = 3; + bool bootstrap = 4; + repeated Seed_ sample = 6; } message Seed_ { - SignedNote note = 1; - stereotomy.KeyState_ keyState = 5; + SignedNote note = 1; + stereotomy.KeyEvent_ establishment = 5; +} + +message Join { + crypto.Digeste view = 1; + SignedNote note = 2; } -message Join { - crypto.Digeste view = 1; - SignedNote note = 2; +message Gateway { + BootstrapTrust trust = 1; + repeated SignedNote initialSeedSet = 2; } -message Gateway { - crypto.HexBloome diadem = 1; - repeated SignedNote initialSeedSet = 2; +message BootstrapTrust { + crypto.HexBloome diadem = 1; + repeated SignedNote successors = 2; } diff --git a/grpc/src/main/proto/gorgoneion.proto b/grpc/src/main/proto/gorgoneion.proto index 194c3669ef..cb61525b8e 100644 --- a/grpc/src/main/proto/gorgoneion.proto +++ b/grpc/src/main/proto/gorgoneion.proto @@ -1,7 +1,7 @@ syntax = "proto3"; option java_multiple_files = true; -option java_package = "com.salesfoce.apollo.gorgoneion.proto"; +option java_package = "com.salesforce.apollo.gorgoneion.proto"; option java_outer_classname = "GorgoneionProto"; option objc_class_prefix = "Gp"; @@ -15,51 +15,51 @@ import "crypto.proto"; package gorgoneion; service Admissions { - rpc apply (stereotomy.KERL_) returns (SignedNonce) {} - rpc register (Credentials) returns (stereotomy.Validations) {} + rpc apply (stereotomy.KERL_) returns (SignedNonce) {} + rpc register (Credentials) returns (stereotomy.Validations) {} } service Endorsement { - rpc endorse (Nonce)returns (MemberSignature) {} - rpc validate (Credentials)returns (stereotomy.Validation_) {} - rpc enroll (Notarization) returns (google.protobuf.Empty) {} + rpc endorse (Nonce)returns (MemberSignature) {} + rpc validate (Credentials)returns (stereotomy.Validation_) {} + rpc enroll (Notarization) returns (google.protobuf.Empty) {} } message MemberSignature { - crypto.Digeste id = 1; - crypto.Sig signature = 2; + crypto.Digeste id = 1; + crypto.Sig signature = 2; } message SignedNonce { - Nonce nonce = 1; - repeated MemberSignature signatures = 2; + Nonce nonce = 1; + repeated MemberSignature signatures = 2; } message Nonce { - stereotomy.Ident member = 1; - crypto.Digeste issuer = 2; - crypto.Digeste noise 
= 3; - google.protobuf.Timestamp timestamp = 4; + stereotomy.Ident member = 1; + crypto.Digeste issuer = 2; + crypto.Digeste noise = 3; + google.protobuf.Timestamp timestamp = 4; } -message Credentials { - SignedNonce nonce = 1; - SignedAttestation attestation = 2; +message Credentials { + SignedNonce nonce = 1; + SignedAttestation attestation = 2; } message Attestation { - stereotomy.KERL_ kerl = 1; - crypto.Sig nonce = 2; - google.protobuf.Timestamp timestamp = 3; - google.protobuf.Any attestation = 4; + stereotomy.KERL_ kerl = 1; + crypto.Sig nonce = 2; + google.protobuf.Timestamp timestamp = 3; + google.protobuf.Any attestation = 4; } message SignedAttestation { - Attestation attestation = 1; - crypto.Sig signature = 2; + Attestation attestation = 1; + crypto.Sig signature = 2; } message Notarization { - stereotomy.KERL_ kerl = 1; - stereotomy.Validations validations = 2; + stereotomy.KERL_ kerl = 1; + stereotomy.Validations validations = 2; } diff --git a/grpc/src/main/proto/leyden.proto b/grpc/src/main/proto/leyden.proto new file mode 100644 index 0000000000..03c6b2d9f4 --- /dev/null +++ b/grpc/src/main/proto/leyden.proto @@ -0,0 +1,63 @@ +syntax = "proto3"; + +option java_multiple_files = true; +option java_package = "com.salesforce.apollo.leyden.proto"; +option java_outer_classname = "LeydenProto"; +option objc_class_prefix = "Ley"; + +import "google/protobuf/empty.proto"; +import "google/protobuf/any.proto"; + +import "stereotomy.proto"; +import "stereotomy-services.proto"; +import "crypto.proto"; + +package leyden; + +service Binder { + rpc bind(Binding) returns(google.protobuf.Empty) {} + rpc unbind(KeyAndToken) returns(google.protobuf.Empty) {} + rpc get(KeyAndToken) returns(Bound) {} +} + +service Reconciliation { + rpc reconcile (Intervals) returns (Update) {} + rpc update (Updating) returns (google.protobuf.Empty) {} +} + +message KeyAndToken { + bytes key = 1; + bytes token = 2; +} + +message Update { + repeated Bound bindings = 1; + repeated Interval intervals = 2; + crypto.Biff have = 3; +} + +message Updating { + int32 ring = 1; + repeated Bound bindings = 2; +} + +message Intervals { + int32 ring = 1; + repeated Interval intervals = 2; + crypto.Biff have = 3; +} + +message Interval { + crypto.Digeste start = 1; + crypto.Digeste end = 2; +} + +message Binding { + Bound bound = 1; + bytes token = 2; +} + +message Bound { + bytes key = 1; + bytes value = 2; +} diff --git a/grpc/src/main/proto/messaging.proto b/grpc/src/main/proto/messaging.proto index 59ae0f4721..a75beeeb66 100644 --- a/grpc/src/main/proto/messaging.proto +++ b/grpc/src/main/proto/messaging.proto @@ -1,51 +1,50 @@ syntax = "proto3"; option java_multiple_files = true; -option java_package = "com.salesfoce.apollo.messaging.proto"; +option java_package = "com.salesforce.apollo.messaging.proto"; option java_outer_classname = "MessagingProto"; option objc_class_prefix = "Mp"; import "google/protobuf/empty.proto"; -import "google/protobuf/any.proto"; import "crypto.proto"; package messaging; message MessageBff { - int32 ring = 1; - crypto.Biff digests = 2; + int32 ring = 1; + crypto.Biff digests = 2; } message ByteMessage { - bytes contents = 1; + bytes contents = 1; } service RBC { - rpc gossip (MessageBff) returns (Reconcile) {} - rpc update (ReconcileContext) returns (google.protobuf.Empty) {} + rpc gossip (MessageBff) returns (Reconcile) {} + rpc update (ReconcileContext) returns (google.protobuf.Empty) {} } message Reconcile { - repeated AgedMessage updates = 1; - crypto.Biff digests = 2; + repeated 
AgedMessage updates = 1; + crypto.Biff digests = 2; } message ReconcileContext { - int32 ring = 1; - repeated AgedMessage updates = 2; + int32 ring = 1; + repeated AgedMessage updates = 2; } message AgedMessage { - int32 age = 1; - bytes content = 3; + int32 age = 1; + bytes content = 3; } message DefaultMessage { - crypto.Digeste source = 1; - int32 nonce = 2; - bytes content = 3; + crypto.Digeste source = 1; + int32 nonce = 2; + bytes content = 3; } message SignedDefaultMessage { - DefaultMessage content = 1; - crypto.Sig signature = 2; + DefaultMessage content = 1; + crypto.Sig signature = 2; } diff --git a/grpc/src/main/proto/sql-state.proto b/grpc/src/main/proto/sql-state.proto index 78017bac6c..b3b77722f9 100644 --- a/grpc/src/main/proto/sql-state.proto +++ b/grpc/src/main/proto/sql-state.proto @@ -1,92 +1,92 @@ syntax = "proto3"; option java_multiple_files = true; -option java_package = "com.salesfoce.apollo.state.proto"; +option java_package = "com.salesforce.apollo.state.proto"; option java_outer_classname = "SqlState"; option objc_class_prefix = "Ss"; package apollo.sqlState; message Batch{ - repeated string statements = 1; + repeated string statements = 1; } message Arguments { - int32 version = 1; - bytes args = 2; + int32 version = 1; + bytes args = 2; } message Statement { - EXECUTION execution = 1; - string sql = 2; - Arguments args = 3; + EXECUTION execution = 1; + string sql = 2; + Arguments args = 3; } message Call { - EXECUTION execution = 1; - string sql = 2; - Arguments args = 3; - repeated int32 outParameters = 4; // value is enum of java.sql.SQLType + EXECUTION execution = 1; + string sql = 2; + Arguments args = 3; + repeated int32 outParameters = 4; // value is enum of java.sql.SQLType } message Script { - string source = 1; - string className = 2; - string method = 3; - Arguments args = 4; + string source = 1; + string className = 2; + string method = 3; + Arguments args = 4; } enum EXECUTION { - INVALID = 0; - EXECUTE = 1; - QUERY = 2; - UPDATE = 3; + INVALID = 0; + EXECUTE = 1; + QUERY = 2; + UPDATE = 3; } message BatchedTransaction { - repeated Txn transactions = 1; + repeated Txn transactions = 1; } message BatchUpdate { - string sql = 1; - repeated Arguments batch = 2; + string sql = 1; + repeated Arguments batch = 2; } message Migration { - string liquibaseSchemaName = 1; - oneof command { - ChangeLog update = 2; - ChangeLog rollback = 3; - ChangeLog changelogSync = 4; - bool clearCheckSums = 5; - Drop drop = 6; - string tag = 7; - } + string liquibaseSchemaName = 1; + oneof command { + ChangeLog update = 2; + ChangeLog rollback = 3; + ChangeLog changelogSync = 4; + bool clearCheckSums = 5; + Drop drop = 6; + string tag = 7; + } } message Txn { - oneof execution { - Statement statement = 1; - Call call = 2; - Batch batch = 3; - BatchUpdate batchUpdate = 4; - Script script = 5; - BatchedTransaction batched = 6; - Migration migration = 7; - } + oneof execution { + Statement statement = 1; + Call call = 2; + Batch batch = 3; + BatchUpdate batchUpdate = 4; + Script script = 5; + BatchedTransaction batched = 6; + Migration migration = 7; + } } message ChangeLog { - oneof type { - int32 count = 1; - string tag = 2; - } - string context = 3; - string labels = 4; - string root = 5; - bytes resources = 6; + oneof type { + int32 count = 1; + string tag = 2; + } + string context = 3; + string labels = 4; + string root = 5; + bytes resources = 6; } message Drop { - string schemas = 1; - string catalog = 2; -} \ No newline at end of file + string schemas = 1; + string 
catalog = 2; +} diff --git a/grpc/src/main/proto/stereotomy-services.proto b/grpc/src/main/proto/stereotomy-services.proto index e3b39e1e24..ab42cd853e 100644 --- a/grpc/src/main/proto/stereotomy-services.proto +++ b/grpc/src/main/proto/stereotomy-services.proto @@ -1,7 +1,7 @@ syntax = "proto3"; option java_multiple_files = true; -option java_package = "com.salesfoce.apollo.stereotomy.services.grpc.proto"; +option java_package = "com.salesforce.apollo.stereotomy.services.grpc.proto"; option java_outer_classname = "StereotomyServicesProto"; option objc_class_prefix = "Stereo"; import "google/protobuf/wrappers.proto"; diff --git a/grpc/src/main/proto/stereotomy.proto b/grpc/src/main/proto/stereotomy.proto index 02e69c790b..ddbb098413 100644 --- a/grpc/src/main/proto/stereotomy.proto +++ b/grpc/src/main/proto/stereotomy.proto @@ -1,10 +1,11 @@ syntax = "proto3"; option java_multiple_files = true; -option java_package = "com.salesfoce.apollo.stereotomy.event.proto"; +option java_package = "com.salesforce.apollo.stereotomy.event.proto"; option java_outer_classname = "StereotomyProto"; option objc_class_prefix = "Stereo"; import "crypto.proto"; +import "google/protobuf/any.proto"; package stereotomy; @@ -52,6 +53,7 @@ message Sealed { EventLoc event = 3; bytes bits = 4; } + google.protobuf.Any value = 5; } // Common header for events diff --git a/grpc/src/main/proto/test.proto b/grpc/src/main/proto/test.proto index e89c917796..c614c609bf 100644 --- a/grpc/src/main/proto/test.proto +++ b/grpc/src/main/proto/test.proto @@ -1,23 +1,23 @@ syntax = "proto3"; option java_multiple_files = true; -option java_package = "com.salesfoce.apollo.test.proto"; +option java_package = "com.salesforce.apollo.test.proto"; option java_outer_classname = "ApolloTestProto"; option objc_class_prefix = "ATp"; import "google/protobuf/any.proto"; -package apollo.test; +package apollo.test; service TestIt { - rpc ping ( google.protobuf.Any ) returns (google.protobuf.Any) {} + rpc ping (google.protobuf.Any) returns (google.protobuf.Any) {} } message ByteMessage { - bytes contents = 1; + bytes contents = 1; } message PeerCreds { - int32 pid = 1; - int32 uid = 2; - repeated int32 gids = 3; + int32 pid = 1; + int32 uid = 2; + repeated int32 gids = 3; } diff --git a/grpc/src/main/proto/thoth.proto b/grpc/src/main/proto/thoth.proto index 98df516df3..5bc4ae53a8 100644 --- a/grpc/src/main/proto/thoth.proto +++ b/grpc/src/main/proto/thoth.proto @@ -1,7 +1,7 @@ syntax = "proto3"; option java_multiple_files = true; -option java_package = "com.salesfoce.apollo.thoth.proto"; +option java_package = "com.salesforce.apollo.thoth.proto"; option java_outer_classname = "ThothProto"; option objc_class_prefix = "Tp"; @@ -41,6 +41,13 @@ service Reconciliation { rpc update (Updating) returns (google.protobuf.Empty) {} } +service Thoth_ { + rpc commit (stereotomy.EventCoords) returns (google.protobuf.Empty) {} + rpc identifier(google.protobuf.Empty) returns (stereotomy.Ident) {} + rpc inception(stereotomy.Ident) returns (stereotomy.InceptionEvent) {} + rpc rotate(google.protobuf.Empty) returns (stereotomy.RotationEvent) {} +} + message Update { repeated stereotomy.KeyEventWithAttachmentAndValidations_ events = 1; repeated Interval intervals = 2; diff --git a/isolate-ftesting/pom.xml b/isolate-ftesting/pom.xml index 32a1ea5fe3..1b56b64567 100644 --- a/isolate-ftesting/pom.xml +++ b/isolate-ftesting/pom.xml @@ -10,6 +10,23 @@ Isolate Testing Functional testing for Apollo Demesnes Isolates + + com.salesforce.apollo + domain-kqueue + test + + + 
com.salesforce.apollo + domain-epoll + test + + + io.netty + netty-transport-native-unix-common + ${netty.version} + ${os.detected.classifier} + test + com.salesforce.apollo isolates @@ -45,19 +62,12 @@ - - io.netty - netty-transport-native-unix-common - ${netty.version} - ${os.detected.classifier} - test - io.netty netty-transport-native-kqueue ${netty.version} ${os.detected.classifier} - optional + test @@ -69,19 +79,12 @@ - - io.netty - netty-transport-native-unix-common - ${netty.version} - ${os.detected.classifier} - test - io.netty netty-transport-native-epoll ${netty.version} ${os.detected.classifier} - optional + test diff --git a/isolate-ftesting/src/test/java/com/salesforce/apollo/domain/DemesneIsolateTest.java b/isolate-ftesting/src/test/java/com/salesforce/apollo/domain/DemesneIsolateTest.java index 0975b1f092..c43ceed88d 100644 --- a/isolate-ftesting/src/test/java/com/salesforce/apollo/domain/DemesneIsolateTest.java +++ b/isolate-ftesting/src/test/java/com/salesforce/apollo/domain/DemesneIsolateTest.java @@ -6,9 +6,9 @@ */ package com.salesforce.apollo.domain; -import com.salesfoce.apollo.cryptography.proto.Digeste; -import com.salesfoce.apollo.demesne.proto.DemesneParameters; -import com.salesfoce.apollo.demesne.proto.SubContext; +import com.salesforce.apollo.cryptography.proto.Digeste; +import com.salesforce.apollo.demesne.proto.DemesneParameters; +import com.salesforce.apollo.demesne.proto.SubContext; import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.archipelago.RouterImpl; import com.salesforce.apollo.archipelago.ServerConnectionCache; diff --git a/isolates/pom.xml b/isolates/pom.xml index 8819bfcb78..633c8fb360 100644 --- a/isolates/pom.xml +++ b/isolates/pom.xml @@ -14,6 +14,16 @@ libdemesne + + com.salesforce.apollo + domain-kqueue + provided + + + com.salesforce.apollo + domain-epoll + provided + com.salesforce.apollo model @@ -266,17 +276,12 @@ - - io.netty - netty-transport-native-unix-common - ${netty.version} - ${os.detected.classifier} - io.netty netty-transport-native-kqueue ${netty.version} ${os.detected.classifier} + provided @@ -288,17 +293,12 @@ - - io.netty - netty-transport-native-unix-common - ${netty.version} - ${os.detected.classifier} - io.netty netty-transport-native-epoll ${netty.version} ${os.detected.classifier} + provided diff --git a/isolates/src/main/java/com/salesforce/apollo/demesnes/isolate/DemesneIsolate.java b/isolates/src/main/java/com/salesforce/apollo/demesnes/isolate/DemesneIsolate.java index 374234d87d..e110aa0a61 100644 --- a/isolates/src/main/java/com/salesforce/apollo/demesnes/isolate/DemesneIsolate.java +++ b/isolates/src/main/java/com/salesforce/apollo/demesnes/isolate/DemesneIsolate.java @@ -7,10 +7,10 @@ package com.salesforce.apollo.demesnes.isolate; import com.google.protobuf.InvalidProtocolBufferException; -import com.salesfoce.apollo.cryptography.proto.Digeste; -import com.salesfoce.apollo.demesne.proto.DemesneParameters; -import com.salesfoce.apollo.demesne.proto.ViewChange; -import com.salesfoce.apollo.stereotomy.event.proto.*; +import com.salesforce.apollo.cryptography.proto.Digeste; +import com.salesforce.apollo.demesne.proto.DemesneParameters; +import com.salesforce.apollo.demesne.proto.ViewChange; +import com.salesforce.apollo.stereotomy.event.proto.*; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.model.demesnes.Demesne; import com.salesforce.apollo.model.demesnes.DemesneImpl; diff --git a/isolates/src/test/java/com/salesforce/apollo/demesnes/DemesneSmoke.java 
b/isolates/src/test/java/com/salesforce/apollo/demesnes/DemesneSmoke.java index 23960d7027..d5dc7ecb36 100644 --- a/isolates/src/test/java/com/salesforce/apollo/demesnes/DemesneSmoke.java +++ b/isolates/src/test/java/com/salesforce/apollo/demesnes/DemesneSmoke.java @@ -8,17 +8,13 @@ import com.google.protobuf.Any; import com.google.protobuf.ByteString; -import com.salesfoce.apollo.cryptography.proto.Digeste; -import com.salesfoce.apollo.demesne.proto.DemesneParameters; -import com.salesfoce.apollo.demesne.proto.SubContext; -import com.salesfoce.apollo.test.proto.ByteMessage; -import com.salesfoce.apollo.test.proto.TestItGrpc; -import com.salesfoce.apollo.test.proto.TestItGrpc.TestItBlockingStub; -import com.salesfoce.apollo.test.proto.TestItGrpc.TestItImplBase; import com.salesforce.apollo.archipelago.*; import com.salesforce.apollo.comm.grpc.DomainSocketServerInterceptor; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; +import com.salesforce.apollo.cryptography.proto.Digeste; +import com.salesforce.apollo.demesne.proto.DemesneParameters; +import com.salesforce.apollo.demesne.proto.SubContext; import com.salesforce.apollo.membership.Member; import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; import com.salesforce.apollo.model.demesnes.DemesneImpl; @@ -37,6 +33,10 @@ import com.salesforce.apollo.stereotomy.mem.MemKERL; import com.salesforce.apollo.stereotomy.mem.MemKeyStore; import com.salesforce.apollo.stereotomy.services.proto.ProtoKERLAdapter; +import com.salesforce.apollo.test.proto.ByteMessage; +import com.salesforce.apollo.test.proto.TestItGrpc; +import com.salesforce.apollo.test.proto.TestItGrpc.TestItBlockingStub; +import com.salesforce.apollo.test.proto.TestItGrpc.TestItImplBase; import io.grpc.*; import io.grpc.ForwardingClientCall.SimpleForwardingClientCall; import io.grpc.netty.DomainSocketNegotiatorHandler.DomainSocketNegotiator; @@ -69,8 +69,9 @@ public class DemesneSmoke { private final static Class clientChannelType = IMPL.getChannelType(); private static final Class serverChannelType = IMPL.getServerDomainSocketChannelClass(); - private final static Executor executor = Executors.newVirtualThreadPerTaskExecutor(); - private EventLoopGroup eventLoopGroup; + + private final static Executor executor = Executors.newVirtualThreadPerTaskExecutor(); + private EventLoopGroup eventLoopGroup; public static ClientInterceptor clientInterceptor(Digest ctx) { return new ClientInterceptor() { diff --git a/isolates/src/test/java/com/salesforce/apollo/demesnes/FireFliesTrace.java b/isolates/src/test/java/com/salesforce/apollo/demesnes/FireFliesTrace.java index b279f6257a..308d9deb5d 100644 --- a/isolates/src/test/java/com/salesforce/apollo/demesnes/FireFliesTrace.java +++ b/isolates/src/test/java/com/salesforce/apollo/demesnes/FireFliesTrace.java @@ -6,8 +6,6 @@ */ package com.salesforce.apollo.demesnes; -import com.salesfoce.apollo.choam.proto.Foundation; -import com.salesfoce.apollo.choam.proto.FoundationSeal; import com.salesforce.apollo.archipelago.LocalServer; import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.archipelago.ServerConnectionCache; @@ -15,6 +13,7 @@ import com.salesforce.apollo.choam.Parameters.Builder; import com.salesforce.apollo.choam.Parameters.ProducerParameters; import com.salesforce.apollo.choam.Parameters.RuntimeParameters; +import com.salesforce.apollo.choam.proto.FoundationSeal; import com.salesforce.apollo.cryptography.Digest; import 
com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.delphinius.Oracle; @@ -25,9 +24,9 @@ import com.salesforce.apollo.membership.Context; import com.salesforce.apollo.membership.ContextImpl; import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; +import com.salesforce.apollo.model.ProcessContainerDomain; import com.salesforce.apollo.model.ProcessDomain; import com.salesforce.apollo.stereotomy.EventCoordinates; -import com.salesforce.apollo.stereotomy.EventValidation; import com.salesforce.apollo.stereotomy.StereotomyImpl; import com.salesforce.apollo.stereotomy.identifier.spec.IdentifierSpecification; import com.salesforce.apollo.stereotomy.mem.MemKERL; @@ -42,7 +41,6 @@ import java.util.*; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; @@ -196,20 +194,22 @@ public void before() throws Exception { }).collect(Collectors.toMap(controlled -> controlled.getIdentifier().getDigest(), controlled -> controlled)); Digest group = DigestAlgorithm.DEFAULT.getOrigin(); - var foundation = Foundation.newBuilder(); - identities.keySet().forEach(d -> foundation.addMembership(d.toDigeste())); - var sealed = FoundationSeal.newBuilder().setFoundation(foundation).build(); + var sealed = FoundationSeal.newBuilder().build(); identities.forEach((digest, id) -> { var context = new ContextImpl<>(DigestAlgorithm.DEFAULT.getLast(), CARDINALITY, 0.2, 3); final var member = new ControlledIdentifierMember(id); var localRouter = new LocalServer(prefix, member).router(ServerConnectionCache.newBuilder().setTarget(30)); - var node = new ProcessDomain(group, member, params, "jdbc:h2:mem:", checkpointDirBase, - RuntimeParameters.newBuilder() - .setFoundation(sealed) - .setContext(context) - .setCommunications(localRouter), new InetSocketAddress(0), - commsDirectory, ffParams, EventValidation.NONE, - IdentifierSpecification.newBuilder()); + var pdParams = new ProcessDomain.ProcessDomainParameters("jdbc:h2:mem:", Duration.ofMinutes(1), + checkpointDirBase, Duration.ofMillis(10), 0.00125, + Duration.ofMinutes(1), 3, 10, 0.1); + var node = new ProcessContainerDomain(group, member, pdParams, params, RuntimeParameters.newBuilder() + .setFoundation( + sealed) + .setContext(context) + .setCommunications( + localRouter), + new InetSocketAddress(0), commsDirectory, ffParams, + IdentifierSpecification.newBuilder(), null); domains.add(node); routers.put(node, localRouter); localRouter.start(); @@ -221,16 +221,10 @@ public void smokin() throws Exception { long then = System.currentTimeMillis(); final var countdown = new CountDownLatch(domains.size()); final var seeds = Collections.singletonList( - new Seed(domains.get(0).getMember().getEvent().getCoordinates(), new InetSocketAddress(0))); + new Seed(domains.get(0).getMember().getEvent(), new InetSocketAddress(0))); domains.forEach(d -> { var listener = new View.ViewLifecycleListener() { - @Override - public void update(EventCoordinates update) { - // TODO Auto-generated method stub - - } - @Override public void viewChange(Context context, Digest viewId, List joins, List leaves) { @@ -251,19 +245,14 @@ public void viewChange(Context context, Digest viewId, List(new CountDownLatch(1)); - domains.get(0) - .getFoundation() - .start(() -> started.get().countDown(), gossipDuration, Collections.emptyList(), - 
Executors.newScheduledThreadPool(2, Thread.ofVirtual().factory())); + domains.get(0).getFoundation().start(() -> started.get().countDown(), gossipDuration, Collections.emptyList()); if (!started.get().await(10, TimeUnit.SECONDS)) { throw new IllegalStateException("Cannot start up kernel"); } started.set(new CountDownLatch(CARDINALITY - 1)); domains.subList(1, domains.size()).forEach(d -> { - d.getFoundation() - .start(() -> started.get().countDown(), gossipDuration, seeds, - Executors.newScheduledThreadPool(1, Thread.ofVirtual().factory())); + d.getFoundation().start(() -> started.get().countDown(), gossipDuration, seeds); }); if (!started.get().await(10, TimeUnit.SECONDS)) { throw new IllegalStateException("Cannot start views"); diff --git a/leyden/README.md b/leyden/README.md new file mode 100644 index 0000000000..acec437ba3 --- /dev/null +++ b/leyden/README.md @@ -0,0 +1,3 @@ +# Leyden + +Experimental general BFT DHT with token authenticated operations diff --git a/leyden/pom.xml b/leyden/pom.xml new file mode 100644 index 0000000000..80cff0936e --- /dev/null +++ b/leyden/pom.xml @@ -0,0 +1,48 @@ + + + 4.0.0 + + com.salesforce.apollo + apollo.app + 0.0.1-SNAPSHOT + + leyden + Leyden + Experimental BFT DHT + + + + com.salesforce.apollo + memberships + + + com.h2database + h2-mvstore + + + + + org.junit.jupiter + junit-jupiter-api + test + + + org.mockito + mockito-core + test + + + ch.qos.logback + logback-classic + test + + + ch.qos.logback + logback-core + test + + + + diff --git a/model/src/main/java/com/salesforce/apollo/model/ProtobufDatatype.java b/leyden/src/main/java/com/salesforce/apollo/leyden/BoundDatatype.java similarity index 52% rename from model/src/main/java/com/salesforce/apollo/model/ProtobufDatatype.java rename to leyden/src/main/java/com/salesforce/apollo/leyden/BoundDatatype.java index 372bfcc60e..dd4b7e6dba 100644 --- a/model/src/main/java/com/salesforce/apollo/model/ProtobufDatatype.java +++ b/leyden/src/main/java/com/salesforce/apollo/leyden/BoundDatatype.java @@ -4,50 +4,50 @@ * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ -package com.salesforce.apollo.model; - -import java.nio.ByteBuffer; -import java.util.function.Function; +package com.salesforce.apollo.leyden; +import com.google.protobuf.InvalidProtocolBufferException; +import com.salesforce.apollo.leyden.proto.Bound; import org.h2.mvstore.DataUtils; import org.h2.mvstore.WriteBuffer; import org.h2.mvstore.type.BasicDataType; -import com.google.protobuf.Message; +import java.nio.ByteBuffer; /** * @author hal.hildebrand - * */ -public final class ProtobufDatatype extends BasicDataType { - private Function factory; +public final class BoundDatatype extends BasicDataType { - private ProtobufDatatype(Function factory) { - this.factory = factory; + @Override + public int compare(Bound a, Bound b) { + return super.compare(a, b); } @Override - public Type[] createStorage(int size) { - @SuppressWarnings("unchecked") - final var storage = (Type[]) new Object[size]; - return storage; + public Bound[] createStorage(int size) { + return new Bound[size]; } @Override - public int getMemory(Type data) { + public int getMemory(Bound data) { return data.getSerializedSize(); } @Override - public Type read(ByteBuffer buff) { + public Bound read(ByteBuffer buff) { int size = DataUtils.readVarInt(buff); byte[] data = new byte[size]; buff.get(data); - return factory.apply(data); + try { + return Bound.parseFrom(buff); + } 
catch (InvalidProtocolBufferException e) { + throw new IllegalArgumentException(e); + } } @Override - public void write(WriteBuffer buff, Type data) { + public void write(WriteBuffer buff, Bound data) { buff.putVarInt(data.getSerializedSize()); buff.put(data.toByteString().toByteArray()); } diff --git a/leyden/src/main/java/com/salesforce/apollo/leyden/CombinedIntervals.java b/leyden/src/main/java/com/salesforce/apollo/leyden/CombinedIntervals.java new file mode 100644 index 0000000000..3495222f38 --- /dev/null +++ b/leyden/src/main/java/com/salesforce/apollo/leyden/CombinedIntervals.java @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2022, salesforce.com, inc. + * All rights reserved. + * SPDX-License-Identifier: BSD-3-Clause + * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause + */ + +package com.salesforce.apollo.leyden; + +import com.salesforce.apollo.cryptography.Digest; +import com.salesforce.apollo.leyden.proto.Interval; + +import java.util.*; +import java.util.function.Predicate; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +/** + * @author hal.hildebrand + */ +public class CombinedIntervals implements Predicate { + private final List intervals = new ArrayList<>(); + + public CombinedIntervals(KeyInterval... allIntervals) { + this(Arrays.asList(allIntervals)); + } + + public CombinedIntervals(List allIntervals) { + if (allIntervals.isEmpty()) { + return; + } + Collections.sort(allIntervals, new Comparator() { + @Override + public int compare(KeyInterval o1, KeyInterval o2) { + int comparison = o1.getBegin().compareTo(o2.getBegin()); + + return comparison == 0 // if both intervals begin the same + ? o1.getEnd().compareTo(o2.getEnd()) // compare their ends + : comparison; + } + }); + KeyInterval current = allIntervals.get(0); + intervals.add(current); + for (int i = 1; i < allIntervals.size(); i++) { + KeyInterval next = allIntervals.get(i); + + int compare = current.getEnd().compareTo(next.getBegin()); + if (compare < 0) { + intervals.add(next); + current = next; + } else { + // overlapping intervals + current = new KeyInterval(current.getBegin(), next.getEnd()); + intervals.set(intervals.size() - 1, current); + } + } + } + + public Stream intervals() { + return intervals.stream(); + } + + @Override + public boolean test(Digest t) { + return intervals.stream().filter(i -> i.test(t)).findFirst().isPresent(); + } + + public List toIntervals() { + return intervals.stream() + .map(e -> Interval.newBuilder() + .setStart(e.getBegin().toDigeste()) + .setEnd(e.getEnd().toDigeste()) + .build()) + .collect(Collectors.toList()); + } + + @Override + public String toString() { + return "CombinedIntervals [intervals=" + intervals + "]"; + } +} diff --git a/leyden/src/main/java/com/salesforce/apollo/leyden/DigestDatatype.java b/leyden/src/main/java/com/salesforce/apollo/leyden/DigestDatatype.java new file mode 100644 index 0000000000..1e3bcede62 --- /dev/null +++ b/leyden/src/main/java/com/salesforce/apollo/leyden/DigestDatatype.java @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2022, salesforce.com, inc. + * All rights reserved. 
+ * SPDX-License-Identifier: BSD-3-Clause + * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause + */ +package com.salesforce.apollo.leyden; + +import com.salesforce.apollo.cryptography.Digest; +import com.salesforce.apollo.cryptography.DigestAlgorithm; +import org.h2.mvstore.WriteBuffer; +import org.h2.mvstore.type.BasicDataType; + +import java.nio.ByteBuffer; + +/** + * @author hal.hildebrand + */ +public final class DigestDatatype extends BasicDataType { + private final DigestAlgorithm algorithm; + + public DigestDatatype(DigestAlgorithm algorithm) { + this.algorithm = algorithm; + } + + @Override + public int compare(Digest a, Digest b) { + return a.compareTo(b); + } + + @Override + public Digest[] createStorage(int size) { + return new Digest[size]; + } + + @Override + public int getMemory(Digest data) { + return algorithm.longLength() * 8; + } + + @Override + public Digest read(ByteBuffer buff) { + byte[] data = new byte[algorithm.longLength() * 8]; + buff.get(data); + return new Digest(algorithm, data); + } + + @Override + public void write(WriteBuffer buff, Digest data) { + buff.put(data.getBytes()); + } +} diff --git a/leyden/src/main/java/com/salesforce/apollo/leyden/KeyInterval.java b/leyden/src/main/java/com/salesforce/apollo/leyden/KeyInterval.java new file mode 100644 index 0000000000..e1db6ca8d4 --- /dev/null +++ b/leyden/src/main/java/com/salesforce/apollo/leyden/KeyInterval.java @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2022, salesforce.com, inc. + * All rights reserved. + * SPDX-License-Identifier: BSD-3-Clause + * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause + */ + +package com.salesforce.apollo.leyden; + +import com.salesforce.apollo.cryptography.Digest; +import com.salesforce.apollo.leyden.proto.Interval; + +import java.util.function.Predicate; + +/** + * @author hal.hildebrand + */ +public class KeyInterval implements Predicate { + private final Digest begin; + private final Digest end; + + public KeyInterval(Digest begin, Digest end) { + assert begin.compareTo(end) < 0 : begin + " >= " + end; + this.begin = begin; + this.end = end; + } + + public KeyInterval(Interval interval) { + this(Digest.from(interval.getStart()), Digest.from(interval.getEnd())); + } + + public Digest getBegin() { + return begin; + } + + public Digest getEnd() { + return end; + } + + @Override + public boolean test(Digest t) { + return begin.compareTo(t) > 0 && end.compareTo(t) > 0; + } + + public Interval toInterval() { + return Interval.newBuilder().setStart(begin.toDigeste()).setEnd(end.toDigeste()).build(); + } + + @Override + public String toString() { + return String.format("KeyInterval [begin=%s, end=%s]", begin, end); + } +} diff --git a/leyden/src/main/java/com/salesforce/apollo/leyden/LeydenJar.java b/leyden/src/main/java/com/salesforce/apollo/leyden/LeydenJar.java new file mode 100644 index 0000000000..a37cb1210d --- /dev/null +++ b/leyden/src/main/java/com/salesforce/apollo/leyden/LeydenJar.java @@ -0,0 +1,591 @@ +package com.salesforce.apollo.leyden; + +import com.google.common.collect.HashMultiset; +import com.google.common.collect.Multiset; +import com.google.common.collect.Ordering; +import com.salesforce.apollo.archipelago.Router; +import com.salesforce.apollo.archipelago.RouterImpl; +import com.salesforce.apollo.bloomFilters.BloomFilter; +import com.salesforce.apollo.cryptography.Digest; +import com.salesforce.apollo.cryptography.DigestAlgorithm; +import 
com.salesforce.apollo.cryptography.proto.Biff; +import com.salesforce.apollo.leyden.comm.binding.*; +import com.salesforce.apollo.leyden.comm.reconcile.*; +import com.salesforce.apollo.leyden.proto.*; +import com.salesforce.apollo.membership.Context; +import com.salesforce.apollo.membership.Member; +import com.salesforce.apollo.membership.Ring; +import com.salesforce.apollo.membership.SigningMember; +import com.salesforce.apollo.ring.RingCommunications; +import com.salesforce.apollo.ring.RingIterator; +import com.salesforce.apollo.utils.Entropy; +import com.salesforce.apollo.utils.Hex; +import com.salesforce.apollo.utils.Utils; +import io.grpc.Status; +import io.grpc.StatusRuntimeException; +import org.h2.mvstore.MVMap; +import org.h2.mvstore.MVStore; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.time.Duration; +import java.time.Instant; +import java.time.temporal.TemporalAmount; +import java.util.*; +import java.util.concurrent.*; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Supplier; +import java.util.stream.Stream; +import java.util.stream.StreamSupport; + +/** + * @author hal.hildebrand + **/ +public class LeydenJar { + public static final String LEYDEN_JAR = "Leyden-Jar"; + private static final Logger log = LoggerFactory.getLogger(LeydenJar.class); + private static final String DIGESTS = "Digests"; + + private final Context context; + private final RouterImpl.CommonCommunications reconComms; + private final RouterImpl.CommonCommunications binderComms; + private final DigestAlgorithm algorithm; + private final double fpr; + private final SigningMember member; + private final MVMap bottled; + private final MVMap digests; + private final AtomicBoolean started = new AtomicBoolean(); + private final RingCommunications reconcile; + private final NavigableMap> pending = new ConcurrentSkipListMap<>(); + private final Borders borders; + private final Reconciled recon; + private final TemporalAmount operationTimeout; + private final Duration operationsFrequency; + private final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool( + 1, Thread.ofVirtual().factory()); + private final OpValidator validator; + + public LeydenJar(OpValidator validator, TemporalAmount operationTimeout, SigningMember member, + Context context, Duration operationsFrequency, Router communications, double fpr, + DigestAlgorithm algorithm, MVStore store, ReconciliationMetrics metrics, + BinderMetrics binderMetrics) { + this.validator = validator; + this.context = context; + this.member = member; + this.algorithm = algorithm; + recon = new Reconciled(); + this.operationTimeout = operationTimeout; + this.operationsFrequency = operationsFrequency; + reconComms = communications.create(member, context.getId(), recon, + ReconciliationService.class.getCanonicalName(), + r -> new ReconciliationServer(r, communications.getClientIdentityProvider(), + metrics), c -> Reckoning.getCreate(c, metrics), + Reckoning.getLocalLoopback(recon, member)); + + borders = new Borders(); + binderComms = communications.create(member, context.getId(), borders, BinderService.class.getCanonicalName(), + r -> new BinderServer(r, communications.getClientIdentityProvider(), + binderMetrics), c -> Bind.getCreate(c, binderMetrics), + Bind.getLocalLoopback(borders, member)); + this.fpr = fpr; + bottled = store.openMap(LEYDEN_JAR, new MVMap.Builder().keyType(new DigestDatatype(algorithm)) + .valueType(new BoundDatatype())); + 
digests = store.openMap(DIGESTS, new MVMap.Builder().keyType(new DigestDatatype(algorithm)) + .valueType(new DigestDatatype(algorithm))); + reconcile = new RingCommunications<>(this.context, member, reconComms); + } + + public void bind(Binding bound) { + var key = bound.getBound().getKey(); + log.info("Bind: {} on: {}", Hex.hex(key.toByteArray()), member.getId()); + var hash = algorithm.digest(key); + Instant timedOut = Instant.now().plus(operationTimeout); + Supplier isTimedOut = () -> Instant.now().isAfter(timedOut); + var result = new CompletableFuture(); + var gathered = HashMultiset.create(); + var iterate = new RingIterator(operationsFrequency, context, member, scheduler, + binderComms); + iterate.iterate(hash, null, (link, r) -> { + link.bind(bound); + return ""; + }, () -> failedMajority(result, maxCount(gathered)), + (tally, futureSailor, destination) -> write(result, gathered, tally, futureSailor, hash, + isTimedOut, destination), + t -> failedMajority(result, maxCount(gathered))); + try { + result.get(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } catch (ExecutionException e) { + if (e.getCause() instanceof RuntimeException re) { + throw re; + } + throw new IllegalStateException(e.getCause()); + } + } + + public Bound get(KeyAndToken keyAndToken) { + var hash = algorithm.digest(keyAndToken.getKey()); + log.info("Get: {} on: {}", hash, member.getId()); + Instant timedOut = Instant.now().plus(operationTimeout); + Supplier isTimedOut = () -> Instant.now().isAfter(timedOut); + var result = new CompletableFuture(); + var gathered = HashMultiset.create(); + var iterate = new RingIterator(operationsFrequency, context, member, scheduler, + binderComms); + iterate.iterate(hash, null, (link, r) -> { + var bound = link.get(keyAndToken); + log.debug("Get {}: bound: <{}:{}> from: {} on: {}", hash, bound.getKey().toStringUtf8(), + bound.getValue().toStringUtf8(), link.getMember().getId(), member.getId()); + return bound; + }, () -> failedMajority(result, maxCount(gathered)), + (tally, futureSailor, destination) -> read(result, gathered, tally, futureSailor, hash, + isTimedOut, destination, "Get"), + t -> failedMajority(result, maxCount(gathered))); + try { + return result.get(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + return null; + } catch (ExecutionException e) { + if (e.getCause() instanceof RuntimeException re) { + throw re; + } + throw new IllegalStateException(e.getCause()); + } + } + + public void start(Duration gossip) { + if (!started.compareAndSet(false, true)) { + return; + } + log.info("Starting context: {}:{} on: {}", context.getId(), System.identityHashCode(context), member.getId()); + binderComms.register(context.getId(), borders); + reconComms.register(context.getId(), recon); + reconcile(scheduler, gossip); + } + + public void stop() { + if (!started.compareAndSet(true, false)) { + return; + } + log.info("Stopping: {}", member.getId()); + binderComms.deregister(context.getId()); + reconComms.deregister(context.getId()); + } + + public void unbind(KeyAndToken keyAndToken) { + var key = keyAndToken.toByteArray(); + var hash = algorithm.digest(key); + log.info("Unbind: {} on: {}", hash, member.getId()); + Instant timedOut = Instant.now().plus(operationTimeout); + Supplier isTimedOut = () -> Instant.now().isAfter(timedOut); + var result = new CompletableFuture(); + var gathered = HashMultiset.create(); + var iterate = new RingIterator(operationsFrequency, context, member, scheduler, + binderComms); + 
iterate.iterate(hash, null, (link, r) -> { + link.unbind(keyAndToken); + return ""; + }, () -> failedMajority(result, maxCount(gathered)), + (tally, futureSailor, destination) -> read(result, gathered, tally, futureSailor, hash, + isTimedOut, destination, "Unbind"), + t -> failedMajority(result, maxCount(gathered))); + try { + result.get(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } catch (ExecutionException e) { + if (e.getCause() instanceof RuntimeException re) { + throw re; + } + throw new IllegalStateException(e.getCause()); + } + } + + private void add(Digest hash, Bound bound, Digest digest) { + var existing = digests.get(hash); + if (existing == null || !existing.equals(digest)) { + bottled.put(hash, bound); + digests.put(hash, digest); + log.info("Add: <{}> on: {}", bound.getKey().toStringUtf8(), member.getId()); + } + } + + private Stream bindingsIn(KeyInterval i) { + Iterator it = new Iterator() { + private final Iterator iterate = bottled.keyIterator(i.getBegin()); + private Digest next; + + { + if (iterate.hasNext()) { + next = iterate.next(); + if (next.compareTo(i.getEnd()) > 0) { + next = null; // got nothing + } + } + } + + @Override + public boolean hasNext() { + return next != null; + } + + @Override + public Digest next() { + var returned = next; + next = null; + if (returned == null) { + throw new NoSuchElementException(); + } + if (iterate.hasNext()) { + next = iterate.next(); + if (next.compareTo(i.getEnd()) > 0) { + next = null; // got nothing + } + } + return returned; + } + }; + Iterable iterable = () -> it; + return StreamSupport.stream(iterable.spliterator(), false); + } + + private void failedMajority(CompletableFuture result, int maxAgree) { + result.completeExceptionally(new NoSuchElementException( + "Unable to achieve majority read, max: %s required: %s on: %s".formatted(maxAgree, context.majority(), + member.getId()))); + } + + private boolean invalid(Digest from, int ring) { + if (ring >= context.getRingCount() || ring < 0) { + log.warn("invalid ring: {} from: {} on: {}", ring, from, member.getId()); + return true; + } + + Member predecessor = context.ring(ring).predecessor(member); + if (predecessor == null || !from.equals(predecessor.getId())) { + log.warn("Invalid, not predecessor: {}, ring: {} expected: {} on: {}", from, ring, predecessor.getId(), + member.getId()); + return true; + } + return false; + } + + private CombinedIntervals keyIntervals() { + List intervals = new ArrayList<>(); + for (int i = 0; i < context.getRingCount(); i++) { + Ring ring = context.ring(i); + Member predecessor = ring.predecessor(member); + if (predecessor == null) { + continue; + } + + Digest begin = ring.hash(predecessor); + Digest end = ring.hash(member); + + if (begin.compareTo(end) > 0) { // wrap around the origin of the ring + intervals.add(new KeyInterval(end, algorithm.getLast())); + intervals.add(new KeyInterval(algorithm.getOrigin(), begin)); + } else { + intervals.add(new KeyInterval(begin, end)); + } + } + return new CombinedIntervals(intervals); + } + + private Multiset.Entry max(HashMultiset gathered) { + return gathered.entrySet().stream().max(Ordering.natural().onResultOf(Multiset.Entry::getCount)).orElse(null); + } + + private int maxCount(HashMultiset gathered) { + final var max = gathered.entrySet().stream().max(Ordering.natural().onResultOf(Multiset.Entry::getCount)); + return max.isEmpty() ? 
0 : max.get().getCount(); + } + + private Biff populate(long seed, CombinedIntervals keyIntervals) { + BloomFilter.DigestBloomFilter bff = new BloomFilter.DigestBloomFilter(seed, Math.max(bottled.size(), 100), fpr); + bottled.keyIterator(algorithm.getOrigin()).forEachRemaining(d -> { + if (keyIntervals.test(d)) { + var bound = bottled.get(d); + if (bound != null) { + var digest = algorithm.digest(bound.toByteString()); + bff.add(digest); + } + } + }); + return bff.toBff(); + } + + private boolean read(CompletableFuture result, HashMultiset gathered, AtomicInteger tally, + Optional futureSailor, Digest hash, Supplier isTimedOut, + RingCommunications.Destination destination, String op) { + if (futureSailor.isEmpty()) { + log.debug("{}: {} empty from: {} on: {}", op, hash, destination.member().getId(), member.getId()); + return !isTimedOut.get(); + } + var content = futureSailor.get(); + if (content != null) { + log.debug("{}: {} from: {} on: {}", op, hash, destination.member().getId(), member.getId()); + gathered.add(content); + var max = max(gathered); + if (max != null) { + tally.set(max.getCount()); + if (max.getCount() > context.toleranceLevel()) { + result.complete(max.getElement()); + log.debug("Majority {}: {} achieved: {} on: {}", op, max.getCount(), hash, member.getId()); + return false; + } + } + return !isTimedOut.get(); + } else { + log.debug("Failed {}: {} from: {} on: {}", op, hash, destination.member().getId(), member.getId()); + return !isTimedOut.get(); + } + } + + private Update reconcile(ReconciliationClient link, Integer ring) { + if (member.equals(link.getMember())) { + log.debug("Reconciliation on ring: {} with self on: {} ", ring, member.getId()); + return null; + } + CombinedIntervals keyIntervals = keyIntervals(); + log.debug("Interval reconciliation on ring: {} with: {} intervals: {} on: {} ", ring, link.getMember().getId(), + keyIntervals, member.getId()); + return link.reconcile(Intervals.newBuilder() + .setRing(ring) + .addAllIntervals(keyIntervals.toIntervals()) + .setHave(populate(Entropy.nextBitsStreamLong(), keyIntervals)) + .build()); + } + + private void reconcile(Optional result, + RingCommunications.Destination destination, + ScheduledExecutorService scheduler, Duration duration) { + if (!started.get()) { + return; + } + if (result.isPresent()) { + try { + Update update = result.get(); + log.trace("Received: {} events in interval reconciliation from: {} on: {}", update.getBindingsCount(), + destination.member().getId(), member.getId()); + update(update.getBindingsList(), destination.member().getId()); + } catch (NoSuchElementException e) { + log.debug("null interval reconciliation with {} on: {}", destination.member().getId(), member.getId(), + e.getCause()); + } + } else { + log.trace("Received no events in interval reconciliation from: {} on: {}", destination.member().getId(), + member.getId()); + } + if (started.get()) { + scheduler.schedule(() -> Thread.ofVirtual().start(Utils.wrapped(() -> reconcile(scheduler, duration), log)), + duration.toNanos(), TimeUnit.NANOSECONDS); + } + } + + private void reconcile(ScheduledExecutorService scheduler, Duration duration) { + if (!started.get()) { + return; + } + Thread.ofVirtual() + .start(() -> reconcile.execute(this::reconcile, + (futureSailor, destination) -> reconcile(futureSailor, destination, + scheduler, duration))); + } + + /** + * Reconcile the intervals for our partner + * + * @param intervals - the relevant intervals of keys and the digests of these keys the partner already have + * @return the 
Update.Builder of missing keys, based on the supplied intervals + */ + private Update.Builder reconcile(Intervals intervals) { + var biff = BloomFilter.from(intervals.getHave()); + var update = Update.newBuilder(); + intervals.getIntervalsList() + .stream() + .map(KeyInterval::new) + .flatMap(this::bindingsIn) + .peek(d -> log.debug("reconcile digest: {} on: {}", d, member.getId())) + .filter(d -> !biff.contains(d)) + .peek(d -> log.debug("filtered reconcile digest: {} on: {}", d, member.getId())) + .map(d1 -> bottled.get(d1)) + .filter(Objects::nonNull) + .forEach(update::addBindings); + return update; + } + + private void update(List bindings, Digest from) { + if (bindings.isEmpty()) { + log.trace("No bindings to update: {} on: {}", from, member.getId()); + return; + } + + log.trace("Events to update: {} on: {}", bindings.size(), member.getId()); + for (var bound : bindings) { + var hash = algorithm.digest(bound.getKey()); + var existing = digests.get(hash); + var digest = algorithm.digest(bound.toByteString()); + if (existing != null && existing.equals(digest)) { + continue; + } + var states = pending.computeIfAbsent(digest, k -> new CopyOnWriteArrayList<>()); + var found = false; + for (var cs : states) { + if (cs.test(bound, from)) { + found = true; + if (cs.count() >= context.majority()) { + add(hash, bound, digest); + pending.remove(digest); + } + break; + } + } + if (!found) { + states.add(new ConsensusState(bound, from)); + } + } + } + + private boolean write(CompletableFuture result, HashMultiset gathered, AtomicInteger tally, + Optional futureSailor, Digest hash, Supplier isTimedOut, + RingCommunications.Destination destination) { + if (futureSailor.isEmpty()) { + return !isTimedOut.get(); + } + var content = futureSailor.get(); + if (content != null) { + log.debug("Bind: {} from: {} on: {}", hash, destination.member().getId(), member.getId()); + gathered.add(content); + var max = max(gathered); + if (max != null) { + tally.set(max.getCount()); + if (max.getCount() > context.toleranceLevel()) { + result.complete(max.getElement()); + log.debug("Majority Bind : {} achieved: {} on: {}", max.getCount(), hash, member.getId()); + return true; + } + } + return !isTimedOut.get(); + } else { + log.debug("Failed: Bind : {} from: {} on: {}", hash, destination.member().getId(), member.getId()); + return !isTimedOut.get(); + } + } + + public interface OpValidator { + boolean validateBind(Bound bound, byte[] token); + + boolean validateGet(byte[] key, byte[] token); + + boolean validateUnbind(byte[] key, byte[] token); + } + + private static class ConsensusState { + private final Bound binding; + private final List members = new ArrayList<>(); + + ConsensusState(Bound binding, Digest from) { + this.binding = binding; + members.add(from); + } + + int count() { + return members.size(); + } + + /** + * Test the binding against the receiver's. 
If the from id is not already in the members set, add it + * + * @param binding - the replicated Bound + * @param from - the Digest id of the originating member + * @return true if the binding equals the receiver's binding, false if not + */ + boolean test(Bound binding, Digest from) { + if (!this.binding.equals(binding)) { + return false; + } + for (var m : members) { + if (m.equals(from)) { + return true; + } + } + members.add(from); + return true; + } + } + + private class Reconciled implements ReconciliationService { + + @Override + public Update reconcile(Intervals intervals, Digest from) { + var ring = intervals.getRing(); + if (invalid(from, ring)) { + log.warn("Invalid reconcile from: {} ring: {} on: {}", from, ring, member.getId()); + return Update.getDefaultInstance(); + } + log.trace("Reconcile from: {} ring: {} on: {}", from, ring, member.getId()); + var builder = LeydenJar.this.reconcile(intervals); + CombinedIntervals keyIntervals = keyIntervals(); + builder.addAllIntervals(keyIntervals.toIntervals()) + .setHave(populate(Entropy.nextBitsStreamLong(), keyIntervals)); + log.trace("Reconcile for: {} ring: {} count: {} on: {}", from, ring, builder.getBindingsCount(), + member.getId()); + return builder.build(); + } + + @Override + public void update(Updating update, Digest from) { + var ring = update.getRing(); + if (invalid(from, ring)) { + log.warn("Invalid update from: {} ring: {} on: {}", from, ring, member.getId()); + return; + } + LeydenJar.this.update(update.getBindingsList(), from); + } + } + + private class Borders implements BinderService { + + @Override + public void bind(Binding request, Digest from) { + var bound = request.getBound(); + if (!validator.validateBind(bound, request.getToken().toByteArray())) { + log.warn("Invalid Bind Token on: {}", member.getId()); + throw new StatusRuntimeException(Status.INVALID_ARGUMENT); + } + var hash = algorithm.digest(bound.getKey()); + log.debug("Bind: {} on: {}", hash, member.getId()); + bottled.put(hash, bound); + var digest = algorithm.digest(bound.toByteString()); + digests.put(hash, digest); + } + + @Override + public Bound get(KeyAndToken request, Digest from) { + if (!validator.validateGet(request.getKey().toByteArray(), request.getToken().toByteArray())) { + log.warn("Invalid Get Token on: {}", member.getId()); + throw new StatusRuntimeException(Status.INVALID_ARGUMENT); + } + var hash = algorithm.digest(request.getKey()); + var bound = bottled.getOrDefault(hash, Bound.getDefaultInstance()); + log.debug("Get: {} bound: {} on: {}", hash, bound != null, member.getId()); + return bound; + } + + @Override + public void unbind(KeyAndToken request, Digest from) { + if (!validator.validateUnbind(request.getKey().toByteArray(), request.getToken().toByteArray())) { + log.warn("Invalid Unbind Token on: {}", member.getId()); + throw new StatusRuntimeException(Status.INVALID_ARGUMENT); + } + var hash = algorithm.digest(request.getKey()); + log.debug("Remove: {} on: {}", hash, member.getId()); + bottled.remove(hash); + digests.remove(hash); + } + } +} diff --git a/leyden/src/main/java/com/salesforce/apollo/leyden/comm/binding/Bind.java b/leyden/src/main/java/com/salesforce/apollo/leyden/comm/binding/Bind.java new file mode 100644 index 0000000000..a6a0d9c5a5 --- /dev/null +++ b/leyden/src/main/java/com/salesforce/apollo/leyden/comm/binding/Bind.java @@ -0,0 +1,84 @@ +package com.salesforce.apollo.leyden.comm.binding; + +import com.salesforce.apollo.archipelago.ManagedServerChannel; +import 
com.salesforce.apollo.leyden.proto.BinderGrpc; +import com.salesforce.apollo.leyden.proto.Binding; +import com.salesforce.apollo.leyden.proto.Bound; +import com.salesforce.apollo.leyden.proto.KeyAndToken; +import com.salesforce.apollo.membership.Member; +import com.salesforce.apollo.membership.SigningMember; + +import java.io.IOException; + +/** + * @author hal.hildebrand + **/ +public class Bind implements BinderClient { + private final ManagedServerChannel channel; + private final BinderMetrics metrics; + private final BinderGrpc.BinderBlockingStub client; + + public Bind(ManagedServerChannel channel, BinderMetrics metrics) { + this.channel = channel; + this.metrics = metrics; + this.client = BinderGrpc.newBlockingStub(channel); + } + + public static BinderClient getCreate(ManagedServerChannel c, BinderMetrics binderMetrics) { + return new Bind(c, binderMetrics); + } + + public static BinderClient getLocalLoopback(BinderService service, SigningMember member) { + return new BinderClient() { + @Override + public void bind(Binding binding) { + service.bind(binding, member.getId()); + } + + @Override + public void close() throws IOException { + // no op + } + + @Override + public Bound get(KeyAndToken key) { + return service.get(key, member.getId()); + } + + @Override + public Member getMember() { + return member; + } + + @Override + public void unbind(KeyAndToken key) { + service.unbind(key, member.getId()); + } + }; + } + + @Override + public void bind(Binding binding) { + client.bind(binding); + } + + @Override + public void close() throws IOException { + channel.release(); + } + + @Override + public Bound get(KeyAndToken key) { + return client.get(key); + } + + @Override + public Member getMember() { + return channel.getMember(); + } + + @Override + public void unbind(KeyAndToken key) { + client.unbind(key); + } +} diff --git a/leyden/src/main/java/com/salesforce/apollo/leyden/comm/binding/BinderClient.java b/leyden/src/main/java/com/salesforce/apollo/leyden/comm/binding/BinderClient.java new file mode 100644 index 0000000000..bfe6d404c7 --- /dev/null +++ b/leyden/src/main/java/com/salesforce/apollo/leyden/comm/binding/BinderClient.java @@ -0,0 +1,18 @@ +package com.salesforce.apollo.leyden.comm.binding; + +import com.salesforce.apollo.archipelago.Link; +import com.salesforce.apollo.leyden.proto.Binding; +import com.salesforce.apollo.leyden.proto.Bound; +import com.salesforce.apollo.leyden.proto.KeyAndToken; + +/** + * @author hal.hildebrand + **/ +public interface BinderClient extends Link { + + void bind(Binding binding); + + Bound get(KeyAndToken key); + + void unbind(KeyAndToken key); +} diff --git a/leyden/src/main/java/com/salesforce/apollo/leyden/comm/binding/BinderMetrics.java b/leyden/src/main/java/com/salesforce/apollo/leyden/comm/binding/BinderMetrics.java new file mode 100644 index 0000000000..cf400f4a1a --- /dev/null +++ b/leyden/src/main/java/com/salesforce/apollo/leyden/comm/binding/BinderMetrics.java @@ -0,0 +1,22 @@ +package com.salesforce.apollo.leyden.comm.binding; + +import com.codahale.metrics.Histogram; +import com.codahale.metrics.Timer; +import com.salesforce.apollo.protocols.EndpointMetrics; + +/** + * @author hal.hildebrand + **/ +public interface BinderMetrics extends EndpointMetrics { + Histogram inboundBind(); + + Timer inboundBindTimer(); + + Histogram inboundGet(); + + Timer inboundGetTimer(); + + Histogram inboundUnbind(); + + Timer inboundUnbindTimer(); +} diff --git a/leyden/src/main/java/com/salesforce/apollo/leyden/comm/binding/BinderServer.java 
b/leyden/src/main/java/com/salesforce/apollo/leyden/comm/binding/BinderServer.java new file mode 100644 index 0000000000..b5134a82e5 --- /dev/null +++ b/leyden/src/main/java/com/salesforce/apollo/leyden/comm/binding/BinderServer.java @@ -0,0 +1,107 @@ +package com.salesforce.apollo.leyden.comm.binding; + +import com.codahale.metrics.Timer; +import com.google.protobuf.Empty; +import com.salesforce.apollo.archipelago.RoutableService; +import com.salesforce.apollo.cryptography.Digest; +import com.salesforce.apollo.leyden.proto.BinderGrpc; +import com.salesforce.apollo.leyden.proto.Binding; +import com.salesforce.apollo.leyden.proto.Bound; +import com.salesforce.apollo.leyden.proto.KeyAndToken; +import com.salesforce.apollo.protocols.ClientIdentity; +import io.grpc.stub.StreamObserver; + +/** + * @author hal.hildebrand + **/ +public class BinderServer extends BinderGrpc.BinderImplBase { + + private final RoutableService routing; + private final ClientIdentity identity; + private final BinderMetrics metrics; + + public BinderServer(RoutableService r, ClientIdentity clientIdentityProvider, + BinderMetrics binderMetrics) { + routing = r; + this.identity = clientIdentityProvider; + this.metrics = binderMetrics; + } + + @Override + public void bind(Binding request, StreamObserver responseObserver) { + Timer.Context timer = metrics == null ? null : metrics.inboundBindTimer().time(); + if (metrics != null) { + var serializedSize = request.getSerializedSize(); + metrics.inboundBandwidth().mark(serializedSize); + metrics.inboundBind().update(serializedSize); + } + Digest from = identity.getFrom(); + if (from == null) { + responseObserver.onError(new IllegalStateException("Member has been removed")); + return; + } + routing.evaluate(responseObserver, s -> { + try { + s.bind(request, from); + responseObserver.onNext(Empty.getDefaultInstance()); + responseObserver.onCompleted(); + } finally { + if (timer != null) { + timer.stop(); + } + } + }); + } + + @Override + public void get(KeyAndToken request, StreamObserver responseObserver) { + Timer.Context timer = metrics == null ? null : metrics.inboundGetTimer().time(); + if (metrics != null) { + var serializedSize = request.getSerializedSize(); + metrics.inboundBandwidth().mark(serializedSize); + metrics.inboundGet().update(serializedSize); + } + Digest from = identity.getFrom(); + if (from == null) { + responseObserver.onError(new IllegalStateException("Member has been removed")); + return; + } + routing.evaluate(responseObserver, s -> { + try { + var bound = s.get(request, from); + responseObserver.onNext(bound); + responseObserver.onCompleted(); + } finally { + if (timer != null) { + timer.stop(); + } + } + }); + } + + @Override + public void unbind(KeyAndToken request, StreamObserver responseObserver) { + Timer.Context timer = metrics == null ? 
null : metrics.inboundUnbindTimer().time(); + if (metrics != null) { + var serializedSize = request.getSerializedSize(); + metrics.inboundBandwidth().mark(serializedSize); + metrics.inboundUnbind().update(serializedSize); + } + Digest from = identity.getFrom(); + if (from == null) { + responseObserver.onError(new IllegalStateException("Member has been removed")); + return; + } + routing.evaluate(responseObserver, s -> { + try { + s.unbind(request, from); + responseObserver.onNext(Empty.getDefaultInstance()); + responseObserver.onCompleted(); + } finally { + if (timer != null) { + timer.stop(); + } + } + }); + } +} diff --git a/leyden/src/main/java/com/salesforce/apollo/leyden/comm/binding/BinderService.java b/leyden/src/main/java/com/salesforce/apollo/leyden/comm/binding/BinderService.java new file mode 100644 index 0000000000..e36e6429d1 --- /dev/null +++ b/leyden/src/main/java/com/salesforce/apollo/leyden/comm/binding/BinderService.java @@ -0,0 +1,17 @@ +package com.salesforce.apollo.leyden.comm.binding; + +import com.salesforce.apollo.cryptography.Digest; +import com.salesforce.apollo.leyden.proto.Binding; +import com.salesforce.apollo.leyden.proto.Bound; +import com.salesforce.apollo.leyden.proto.KeyAndToken; + +/** + * @author hal.hildebrand + **/ +public interface BinderService { + void bind(Binding request, Digest from); + + Bound get(KeyAndToken request, Digest from); + + void unbind(KeyAndToken request, Digest from); +} diff --git a/leyden/src/main/java/com/salesforce/apollo/leyden/comm/reconcile/Reckoning.java b/leyden/src/main/java/com/salesforce/apollo/leyden/comm/reconcile/Reckoning.java new file mode 100644 index 0000000000..ac993c63f0 --- /dev/null +++ b/leyden/src/main/java/com/salesforce/apollo/leyden/comm/reconcile/Reckoning.java @@ -0,0 +1,74 @@ +package com.salesforce.apollo.leyden.comm.reconcile; + +import com.salesforce.apollo.archipelago.ManagedServerChannel; +import com.salesforce.apollo.leyden.proto.Intervals; +import com.salesforce.apollo.leyden.proto.ReconciliationGrpc; +import com.salesforce.apollo.leyden.proto.Update; +import com.salesforce.apollo.leyden.proto.Updating; +import com.salesforce.apollo.membership.Member; +import com.salesforce.apollo.membership.SigningMember; + +import java.io.IOException; + +/** + * @author hal.hildebrand + **/ +public class Reckoning implements ReconciliationClient { + private final ManagedServerChannel channel; + private final ReconciliationGrpc.ReconciliationBlockingStub client; + private final Member member; + + public Reckoning(ManagedServerChannel channel, Member member, ReconciliationMetrics metrics) { + this.channel = channel; + this.client = ReconciliationGrpc.newBlockingStub(channel); + this.member = member; + } + + public static ReconciliationClient getCreate(ManagedServerChannel channel, ReconciliationMetrics metrics) { + return new Reckoning(channel, channel.getMember(), metrics); + } + + public static ReconciliationClient getLocalLoopback(ReconciliationService service, SigningMember member) { + return new ReconciliationClient() { + @Override + public void close() throws IOException { + + } + + @Override + public Member getMember() { + return member; + } + + @Override + public Update reconcile(Intervals intervals) { + return Update.getDefaultInstance(); + } + + @Override + public void update(Updating updating) { + // noop + } + }; + } + + @Override + public void close() throws IOException { + channel.release(); + } + + @Override + public Member getMember() { + return member; + } + + @Override + public Update 
reconcile(Intervals intervals) { + return client.reconcile(intervals); + } + + @Override + public void update(Updating updating) { + client.update(updating); + } +} diff --git a/leyden/src/main/java/com/salesforce/apollo/leyden/comm/reconcile/ReconciliationClient.java b/leyden/src/main/java/com/salesforce/apollo/leyden/comm/reconcile/ReconciliationClient.java new file mode 100644 index 0000000000..b375f4a427 --- /dev/null +++ b/leyden/src/main/java/com/salesforce/apollo/leyden/comm/reconcile/ReconciliationClient.java @@ -0,0 +1,15 @@ +package com.salesforce.apollo.leyden.comm.reconcile; + +import com.salesforce.apollo.archipelago.Link; +import com.salesforce.apollo.leyden.proto.Intervals; +import com.salesforce.apollo.leyden.proto.Update; +import com.salesforce.apollo.leyden.proto.Updating; + +/** + * @author hal.hildebrand + **/ +public interface ReconciliationClient extends Link { + Update reconcile(Intervals intervals); + + void update(Updating updating); +} diff --git a/leyden/src/main/java/com/salesforce/apollo/leyden/comm/reconcile/ReconciliationMetrics.java b/leyden/src/main/java/com/salesforce/apollo/leyden/comm/reconcile/ReconciliationMetrics.java new file mode 100644 index 0000000000..63baf8dff3 --- /dev/null +++ b/leyden/src/main/java/com/salesforce/apollo/leyden/comm/reconcile/ReconciliationMetrics.java @@ -0,0 +1,15 @@ +package com.salesforce.apollo.leyden.comm.reconcile; + +import com.codahale.metrics.Histogram; +import com.codahale.metrics.Timer; +import com.salesforce.apollo.protocols.EndpointMetrics; + +public interface ReconciliationMetrics extends EndpointMetrics { + Histogram inboundReconcile(); + + Timer inboundReconcileTimer(); + + Timer inboundUpdateTimer(); + + Histogram reconcileReply(); +} diff --git a/leyden/src/main/java/com/salesforce/apollo/leyden/comm/reconcile/ReconciliationServer.java b/leyden/src/main/java/com/salesforce/apollo/leyden/comm/reconcile/ReconciliationServer.java new file mode 100644 index 0000000000..f8827d5f07 --- /dev/null +++ b/leyden/src/main/java/com/salesforce/apollo/leyden/comm/reconcile/ReconciliationServer.java @@ -0,0 +1,85 @@ +package com.salesforce.apollo.leyden.comm.reconcile; + +import com.codahale.metrics.Timer; +import com.google.protobuf.Empty; +import com.salesforce.apollo.archipelago.RoutableService; +import com.salesforce.apollo.cryptography.Digest; +import com.salesforce.apollo.leyden.proto.Intervals; +import com.salesforce.apollo.leyden.proto.ReconciliationGrpc; +import com.salesforce.apollo.leyden.proto.Update; +import com.salesforce.apollo.leyden.proto.Updating; +import com.salesforce.apollo.protocols.ClientIdentity; +import io.grpc.stub.StreamObserver; + +/** + * @author hal.hildebrand + **/ +public class ReconciliationServer extends ReconciliationGrpc.ReconciliationImplBase { + private final RoutableService routing; + private final ReconciliationMetrics metrics; + private final ClientIdentity identity; + + public ReconciliationServer(RoutableService r, ClientIdentity identity, + ReconciliationMetrics metrics) { + this.routing = r; + this.identity = identity; + this.metrics = metrics; + } + + @Override + public void reconcile(Intervals request, StreamObserver responseObserver) { + Timer.Context timer = metrics == null ? 
null : metrics.inboundReconcileTimer().time(); + if (metrics != null) { + var serializedSize = request.getSerializedSize(); + metrics.inboundBandwidth().mark(serializedSize); + metrics.inboundReconcile().update(serializedSize); + } + Digest from = identity.getFrom(); + if (from == null) { + responseObserver.onError(new IllegalStateException("Member has been removed")); + return; + } + routing.evaluate(responseObserver, s -> { + try { + Update response = s.reconcile(request, from); + responseObserver.onNext(response); + responseObserver.onCompleted(); + if (metrics != null) { + var serializedSize = response.getSerializedSize(); + metrics.outboundBandwidth().mark(serializedSize); + metrics.reconcileReply().update(serializedSize); + } + } finally { + if (timer != null) { + timer.stop(); + } + } + }); + } + + @Override + public void update(Updating request, StreamObserver responseObserver) { + Timer.Context timer = metrics == null ? null : metrics.inboundUpdateTimer().time(); + if (metrics != null) { + var serializedSize = request.getSerializedSize(); + metrics.inboundBandwidth().mark(serializedSize); + metrics.inboundReconcile().update(serializedSize); + } + Digest from = identity.getFrom(); + if (from == null) { + responseObserver.onError(new IllegalStateException("Member has been removed")); + return; + } + routing.evaluate(responseObserver, s -> { + try { + s.update(request, from); + responseObserver.onNext(Empty.getDefaultInstance()); + responseObserver.onCompleted(); + } finally { + if (timer != null) { + timer.stop(); + } + } + }); + } +} diff --git a/leyden/src/main/java/com/salesforce/apollo/leyden/comm/reconcile/ReconciliationService.java b/leyden/src/main/java/com/salesforce/apollo/leyden/comm/reconcile/ReconciliationService.java new file mode 100644 index 0000000000..ce5a4bdb19 --- /dev/null +++ b/leyden/src/main/java/com/salesforce/apollo/leyden/comm/reconcile/ReconciliationService.java @@ -0,0 +1,15 @@ +package com.salesforce.apollo.leyden.comm.reconcile; + +import com.salesforce.apollo.cryptography.Digest; +import com.salesforce.apollo.leyden.proto.Intervals; +import com.salesforce.apollo.leyden.proto.Update; +import com.salesforce.apollo.leyden.proto.Updating; + +/** + * @author hal.hildebrand + **/ +public interface ReconciliationService { + Update reconcile(Intervals request, Digest from); + + void update(Updating request, Digest from); +} diff --git a/leyden/src/test/java/com/salesforce/apollo/leyden/CombinedIntervalsTest.java b/leyden/src/test/java/com/salesforce/apollo/leyden/CombinedIntervalsTest.java new file mode 100644 index 0000000000..a6920d1dcb --- /dev/null +++ b/leyden/src/test/java/com/salesforce/apollo/leyden/CombinedIntervalsTest.java @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2022, salesforce.com, inc. + * All rights reserved. 
+ * SPDX-License-Identifier: BSD-3-Clause + * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause + */ + +package com.salesforce.apollo.leyden; + +import com.salesforce.apollo.cryptography.Digest; +import com.salesforce.apollo.cryptography.DigestAlgorithm; +import org.junit.jupiter.api.Test; + +import java.util.ArrayList; +import java.util.List; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +/** + * @author hal.hildebrand + * @since 220 + */ +public class CombinedIntervalsTest { + + @Test + public void smoke() { + List intervals = new ArrayList<>(); + + intervals.add(new KeyInterval(Digest.normalized(DigestAlgorithm.DEFAULT, new byte[] { 0, (byte) 200 }), + Digest.normalized(DigestAlgorithm.DEFAULT, new byte[] { 0, (byte) 241 }))); + intervals.add(new KeyInterval(Digest.normalized(DigestAlgorithm.DEFAULT, new byte[] { 0, 50 }), + Digest.normalized(DigestAlgorithm.DEFAULT, new byte[] { 0, 75 }))); + intervals.add(new KeyInterval(Digest.normalized(DigestAlgorithm.DEFAULT, new byte[] { 0, 50 }), + Digest.normalized(DigestAlgorithm.DEFAULT, new byte[] { 0, 90 }))); + intervals.add(new KeyInterval(Digest.normalized(DigestAlgorithm.DEFAULT, new byte[] { 0, 25 }), + Digest.normalized(DigestAlgorithm.DEFAULT, new byte[] { 0, 49 }))); + intervals.add(new KeyInterval(Digest.normalized(DigestAlgorithm.DEFAULT, new byte[] { 0, 25 }), + Digest.normalized(DigestAlgorithm.DEFAULT, new byte[] { 0, 49 }))); + intervals.add(new KeyInterval(Digest.normalized(DigestAlgorithm.DEFAULT, new byte[] { 0, (byte) 128 }), + Digest.normalized(DigestAlgorithm.DEFAULT, new byte[] { 0, (byte) 175 }))); + CombinedIntervals combined = new CombinedIntervals(intervals); + var compressed = combined.intervals().toList(); + System.out.println(compressed); + assertEquals(4, compressed.size()); + } +} diff --git a/leyden/src/test/java/com/salesforce/apollo/leyden/LeydenJarTest.java b/leyden/src/test/java/com/salesforce/apollo/leyden/LeydenJarTest.java new file mode 100644 index 0000000000..11c2f2fd57 --- /dev/null +++ b/leyden/src/test/java/com/salesforce/apollo/leyden/LeydenJarTest.java @@ -0,0 +1,135 @@ +package com.salesforce.apollo.leyden; + +import com.google.protobuf.ByteString; +import com.salesforce.apollo.archipelago.LocalServer; +import com.salesforce.apollo.archipelago.Router; +import com.salesforce.apollo.archipelago.ServerConnectionCache; +import com.salesforce.apollo.cryptography.DigestAlgorithm; +import com.salesforce.apollo.leyden.proto.Binding; +import com.salesforce.apollo.leyden.proto.Bound; +import com.salesforce.apollo.leyden.proto.KeyAndToken; +import com.salesforce.apollo.membership.Context; +import com.salesforce.apollo.membership.Member; +import com.salesforce.apollo.membership.SigningMember; +import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; +import com.salesforce.apollo.stereotomy.StereotomyImpl; +import com.salesforce.apollo.stereotomy.mem.MemKERL; +import com.salesforce.apollo.stereotomy.mem.MemKeyStore; +import com.salesforce.apollo.utils.Utils; +import org.h2.jdbcx.JdbcConnectionPool; +import org.h2.mvstore.MVStore; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.security.SecureRandom; +import java.time.Duration; +import java.util.*; +import java.util.concurrent.Executors; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static org.junit.jupiter.api.Assertions.assertTrue; + 
+/** + * @author hal.hildebrand + **/ +public class LeydenJarTest { + + private static final double PBYZ = 0.1; + protected final TreeMap dhts = new TreeMap<>(); + protected final Map routers = new HashMap<>(); + private String prefix; + private LeydenJar.OpValidator validator; + private Context context; + + @AfterEach + public void after() { + routers.values().forEach(r -> r.close(Duration.ofSeconds(2))); + routers.clear(); + dhts.values().forEach(t -> t.stop()); + dhts.clear(); + } + + @BeforeEach + public void before() throws Exception { + validator = new LeydenJar.OpValidator() { + @Override + public boolean validateBind(Bound bound, byte[] token) { + return true; + } + + @Override + public boolean validateGet(byte[] key, byte[] token) { + return true; + } + + @Override + public boolean validateUnbind(byte[] key, byte[] token) { + return true; + } + }; + prefix = UUID.randomUUID().toString(); + var entropy = SecureRandom.getInstance("SHA1PRNG"); + entropy.setSeed(new byte[] { 6, 6, 6 }); + var kerl = new MemKERL(DigestAlgorithm.DEFAULT); + var stereotomy = new StereotomyImpl(new MemKeyStore(), kerl, entropy); + var cardinality = 5; + var identities = IntStream.range(0, cardinality) + .mapToObj(i -> stereotomy.newIdentifier()) + .collect(Collectors.toMap(controlled -> new ControlledIdentifierMember(controlled), + controlled -> controlled)); + context = Context.newBuilder().setpByz(PBYZ).setCardinality(cardinality).build(); + identities.keySet().forEach(m -> context.activate(m)); + identities.keySet().forEach(member -> instantiate(member, context)); + + System.out.println(); + System.out.println(); + System.out.println(String.format("Cardinality: %s, Prob Byz: %s, Rings: %s Majority: %s", cardinality, PBYZ, + context.getRingCount(), context.majority())); + System.out.println(); + } + + @Test + public void smokin() { + routers.values().forEach(r -> r.start()); + dhts.values().forEach(lj -> lj.start(Duration.ofMillis(10))); + + var source = dhts.firstEntry().getValue(); + var sink = dhts.lastEntry().getValue(); + + var key = ByteString.copyFrom("hello".getBytes()); + var value = ByteString.copyFrom("world".getBytes()); + var binding = Binding.newBuilder().setBound(Bound.newBuilder().setKey(key).setValue(value).build()).build(); + source.bind(binding); + + for (var e : dhts.entrySet()) { + var success = Utils.waitForCondition(10_000, () -> { + Bound bound; + try { + bound = e.getValue().get(KeyAndToken.newBuilder().setKey(key).build()); + } catch (NoSuchElementException nse) { + try { + Thread.sleep(100); + } catch (InterruptedException ex) { + } + return false; + } + return bound != null; + }); + assertTrue(success, "Failed for " + e.getKey().getId()); + } + } + + protected void instantiate(SigningMember member, Context context) { + final var url = String.format("jdbc:h2:mem:%s-%s;DB_CLOSE_ON_EXIT=FALSE", member.getId(), prefix); + JdbcConnectionPool connectionPool = JdbcConnectionPool.create(url, "", ""); + connectionPool.setMaxConnections(10); + var exec = Executors.newVirtualThreadPerTaskExecutor(); + var router = new LocalServer(prefix, member).router(ServerConnectionCache.newBuilder().setTarget(2)); + routers.put(member, router); + dhts.put(member, + new LeydenJar(validator, Duration.ofSeconds(5), member, context, Duration.ofMillis(10), router, 0.0125, + DigestAlgorithm.DEFAULT, new MVStore.Builder().open(), null, null)); + } +} diff --git a/leyden/src/test/resources/logback-test.xml b/leyden/src/test/resources/logback-test.xml new file mode 100644 index 0000000000..e8e34bf675 --- 
/dev/null +++ b/leyden/src/test/resources/logback-test.xml @@ -0,0 +1,30 @@ + + + + + + + + + + %d{mm:ss.SSS} [%thread] %-5level %logger{0} - %msg%n + + + + + + + + + + + + + + + + + + + + diff --git a/liquibase-deterministic/README.md b/liquibase-deterministic/README.md new file mode 100644 index 0000000000..47f2c66798 --- /dev/null +++ b/liquibase-deterministic/README.md @@ -0,0 +1,3 @@ +# Deterministic Liquibase + +Modifications to Liquibase to support deterministic SQL execution of migrations.~~~~ diff --git a/memberships/pom.xml b/memberships/pom.xml index 154f335a65..d1d312b63a 100644 --- a/memberships/pom.xml +++ b/memberships/pom.xml @@ -15,6 +15,16 @@ org.slf4j slf4j-api + + com.salesforce.apollo + domain-kqueue + provided + + + com.salesforce.apollo + domain-epoll + provided + com.salesforce.apollo protocols @@ -40,13 +50,10 @@ native-lib-loader - com.salesforce.apollo - domain-epoll - provided - - - com.salesforce.apollo - domain-kqueue + io.netty + netty-transport-native-unix-common + ${netty.version} + ${os.detected.classifier} provided @@ -77,4 +84,51 @@ test + + + + + kr.motd.maven + os-maven-plugin + 1.7.1 + + + + + + + mac-domain + + + mac + + + + + io.netty + netty-transport-native-kqueue + ${netty.version} + ${os.detected.classifier} + provided + + + + + linux-domain + + + linux + + + + + io.netty + netty-transport-native-epoll + ${netty.version} + ${os.detected.classifier} + provided + + + + diff --git a/memberships/src/main/java/com/salesforce/apollo/archipelago/Enclave.java b/memberships/src/main/java/com/salesforce/apollo/archipelago/Enclave.java index b71c9fc072..242cf22793 100644 --- a/memberships/src/main/java/com/salesforce/apollo/archipelago/Enclave.java +++ b/memberships/src/main/java/com/salesforce/apollo/archipelago/Enclave.java @@ -39,10 +39,10 @@ * @author hal.hildebrand */ public class Enclave implements RouterSupplier { - private final static Executor executor = Executors.newVirtualThreadPerTaskExecutor(); private final static Class channelType = IMPL.getChannelType(); private static final Logger log = LoggerFactory.getLogger(Enclave.class); + private final Executor executor = Executors.newVirtualThreadPerTaskExecutor(); private final DomainSocketAddress bridge; private final Consumer contextRegistration; private final DomainSocketAddress endpoint; diff --git a/memberships/src/main/java/com/salesforce/apollo/archipelago/LocalServer.java b/memberships/src/main/java/com/salesforce/apollo/archipelago/LocalServer.java index bc9ec5167c..7047f2039a 100644 --- a/memberships/src/main/java/com/salesforce/apollo/archipelago/LocalServer.java +++ b/memberships/src/main/java/com/salesforce/apollo/archipelago/LocalServer.java @@ -34,9 +34,9 @@ * @author hal.hildebrand */ public class LocalServer implements RouterSupplier { - private final static Executor executor = Executors.newVirtualThreadPerTaskExecutor(); private static final Logger log = LoggerFactory.getLogger(LocalServer.class); private static final String NAME_TEMPLATE = "%s-%s"; + private final Executor executor = Executors.newVirtualThreadPerTaskExecutor(); private final ClientInterceptor clientInterceptor; private final Member from; @@ -74,7 +74,7 @@ public RouterImpl router(ServerConnectionCache.Builder cacheBuilder, Supplier
  • serverBuilder = InProcessServerBuilder.forName(name) - .executor(executor) + .executor(Executors.newVirtualThreadPerTaskExecutor()) .intercept(ConcurrencyLimitServerInterceptor.newBuilder( limitsBuilder.build()) .statusSupplier( diff --git a/memberships/src/main/java/com/salesforce/apollo/archipelago/ManagedServerChannel.java b/memberships/src/main/java/com/salesforce/apollo/archipelago/ManagedServerChannel.java index 7635b26410..01cc45e07e 100644 --- a/memberships/src/main/java/com/salesforce/apollo/archipelago/ManagedServerChannel.java +++ b/memberships/src/main/java/com/salesforce/apollo/archipelago/ManagedServerChannel.java @@ -6,24 +6,21 @@ */ package com.salesforce.apollo.archipelago; -import static com.salesforce.apollo.cryptography.QualifiedBase64.qb64; - -import java.util.concurrent.TimeUnit; - import com.google.common.base.MoreObjects; import com.salesforce.apollo.archipelago.ServerConnectionCache.ReleasableManagedChannel; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.membership.Member; - -import io.grpc.CallOptions; -import io.grpc.ClientCall; -import io.grpc.ConnectivityState; +import io.grpc.*; import io.grpc.ForwardingClientCall.SimpleForwardingClientCall; -import io.grpc.ManagedChannel; -import io.grpc.Metadata; -import io.grpc.MethodDescriptor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.concurrent.TimeUnit; + +import static com.salesforce.apollo.cryptography.QualifiedBase64.qb64; public class ManagedServerChannel extends ManagedChannel { + private final static Logger log = LoggerFactory.getLogger(ManagedServerChannel.class); private final Digest context; private final ReleasableManagedChannel delegate; @@ -68,10 +65,10 @@ public boolean isTerminated() { } @Override - public ClientCall newCall(MethodDescriptor methodDescriptor, - CallOptions callOptions) { - return new SimpleForwardingClientCall(delegate.getChannel() - .newCall(methodDescriptor, callOptions)) { + public ClientCall newCall( + MethodDescriptor methodDescriptor, CallOptions callOptions) { + return new SimpleForwardingClientCall( + delegate.getChannel().newCall(methodDescriptor, callOptions)) { @Override public void start(Listener responseListener, Metadata headers) { headers.put(Router.METADATA_CONTEXT_KEY, qb64(context)); @@ -97,12 +94,19 @@ public void resetConnectBackoff() { @Override public ManagedChannel shutdown() { - return delegate.getChannel().shutdown(); + if (log.isTraceEnabled()) { + log.trace("Shutting down connection to: {} on: {}", delegate.getMember().getId(), delegate.getFrom(), + new Exception("Shutdown stacktrace")); + } else if (log.isDebugEnabled()) { + log.debug("Shutting down connection to: {} on: {}", delegate.getMember().getId(), delegate.getFrom()); + } + return delegate.shutdown(); } @Override public ManagedChannel shutdownNow() { - return delegate.getChannel().shutdownNow(); + log.trace("Shutting down connection (now) to: {} on: {}", delegate.getMember().getId(), delegate.getFrom()); + return delegate.shutdownNow(); } @Override diff --git a/memberships/src/main/java/com/salesforce/apollo/archipelago/MtlsClient.java b/memberships/src/main/java/com/salesforce/apollo/archipelago/MtlsClient.java index 98e98eaf0e..8d1eeff417 100644 --- a/memberships/src/main/java/com/salesforce/apollo/archipelago/MtlsClient.java +++ b/memberships/src/main/java/com/salesforce/apollo/archipelago/MtlsClient.java @@ -6,28 +6,26 @@ */ package com.salesforce.apollo.archipelago; -import java.net.SocketAddress; -import 
java.util.concurrent.Executor; -import java.util.concurrent.Executors; - import com.netflix.concurrency.limits.Limiter; import com.netflix.concurrency.limits.grpc.client.ConcurrencyLimitClientInterceptor; import com.netflix.concurrency.limits.grpc.client.GrpcClientLimiterBuilder; import com.netflix.concurrency.limits.grpc.client.GrpcClientRequestContext; import com.salesforce.apollo.comm.grpc.ClientContextSupplier; import com.salesforce.apollo.cryptography.ssl.CertificateValidator; - import io.grpc.ManagedChannel; import io.grpc.Status; import io.grpc.netty.NettyChannelBuilder; import io.netty.handler.ssl.ClientAuth; +import java.net.SocketAddress; +import java.util.concurrent.Executor; +import java.util.concurrent.Executors; + /** * @author hal.hildebrand - * */ public class MtlsClient { - private final static Executor exec = Executors.newVirtualThreadPerTaskExecutor(); + private final Executor exec = Executors.newVirtualThreadPerTaskExecutor(); private final ManagedChannel channel; @@ -39,7 +37,8 @@ public MtlsClient(SocketAddress address, ClientAuth clientAuth, String alias, Cl .executor(exec) .sslContext(supplier.forClient(clientAuth, alias, validator, MtlsServer.TL_SV1_3)) .intercept(new ConcurrencyLimitClientInterceptor(limiter, - () -> Status.RESOURCE_EXHAUSTED.withDescription("Client side concurrency limit exceeded"))) + () -> Status.RESOURCE_EXHAUSTED.withDescription( + "Client side concurrency limit exceeded"))) .build(); } diff --git a/memberships/src/main/java/com/salesforce/apollo/archipelago/MtlsServer.java b/memberships/src/main/java/com/salesforce/apollo/archipelago/MtlsServer.java index 7bd34a5892..7ae6f4661c 100644 --- a/memberships/src/main/java/com/salesforce/apollo/archipelago/MtlsServer.java +++ b/memberships/src/main/java/com/salesforce/apollo/archipelago/MtlsServer.java @@ -39,7 +39,6 @@ import java.security.Security; import java.security.cert.X509Certificate; import java.util.concurrent.ExecutionException; -import java.util.concurrent.Executor; import java.util.concurrent.Executors; import java.util.function.Function; import java.util.function.Supplier; @@ -49,7 +48,6 @@ */ public class MtlsServer implements RouterSupplier { static final String TL_SV1_3 = "TLSv1.3"; - private final static Executor executor = Executors.newVirtualThreadPerTaskExecutor(); private static final Provider PROVIDER_JSSE = Security.getProvider("SunJSSE"); private final LoadingCache cachedMembership; private final Function contextSupplier; @@ -139,7 +137,7 @@ public RouterImpl router(ServerConnectionCache.Builder cacheBuilder, Supplier
  • { - private static final Executor executor = Executors.newVirtualThreadPerTaskExecutor(); private final static Class channelType = IMPL.getChannelType(); + private final Executor executor = Executors.newVirtualThreadPerTaskExecutor(); private final String agent; private final EventLoopGroup eventLoopGroup = IMPL.getEventLoopGroup(); private final Demultiplexer inbound; diff --git a/memberships/src/main/java/com/salesforce/apollo/archipelago/RouterImpl.java b/memberships/src/main/java/com/salesforce/apollo/archipelago/RouterImpl.java index e619502ec9..b72758a28d 100644 --- a/memberships/src/main/java/com/salesforce/apollo/archipelago/RouterImpl.java +++ b/memberships/src/main/java/com/salesforce/apollo/archipelago/RouterImpl.java @@ -37,23 +37,26 @@ */ public class RouterImpl implements Router { - private final static Logger log = LoggerFactory.getLogger(RouterImpl.class); - private final ServerConnectionCache cache; - private final ClientIdentity clientIdentityProvider; - private final Consumer contextRegistration; - private final Member from; - private final MutableHandlerRegistry registry = new MutableHandlerRegistry(); - private final Server server; - private final Map> services = new ConcurrentHashMap<>(); - private final AtomicBoolean started = new AtomicBoolean(); + private final static Logger log = LoggerFactory.getLogger(RouterImpl.class); + private final ServerConnectionCache cache; + private final ClientIdentity clientIdentityProvider; + private final Consumer contextRegistration; + private final Member from; + private final MutableHandlerRegistry registry = new MutableHandlerRegistry(); + private final Server server; + private final Map> services = new ConcurrentHashMap<>(); + private final AtomicBoolean started = new AtomicBoolean(); + public RouterImpl(Member from, ServerBuilder serverBuilder, ServerConnectionCache.Builder cacheBuilder, ClientIdentity clientIdentityProvider) { - this(from, serverBuilder, cacheBuilder, clientIdentityProvider, d -> {}); + this(from, serverBuilder, cacheBuilder, clientIdentityProvider, d -> { + }); } + public RouterImpl(Member from, ServerBuilder serverBuilder, ServerConnectionCache.Builder cacheBuilder, ClientIdentity clientIdentityProvider, Consumer contextRegistration) { this.server = serverBuilder.fallbackHandlerRegistry(registry).intercept(serverInterceptor()).build(); - this.cache = cacheBuilder.build(); + this.cache = cacheBuilder.clone().setMember(from.getId()).build(); this.clientIdentityProvider = clientIdentityProvider; this.contextRegistration = contextRegistration; this.from = from; @@ -185,7 +188,7 @@ public class CommonCommunications implements Route private final CreateClientCommunications createFunction; private final Member from; private final Client localLoopback; - private final RoutableService routing; + private final RoutableService routing; public CommonCommunications(Digest context, Member from, RoutableService routing) { this(context, from, routing, m -> vanilla(from), vanilla(from)); diff --git a/memberships/src/main/java/com/salesforce/apollo/archipelago/ServerConnectionCache.java b/memberships/src/main/java/com/salesforce/apollo/archipelago/ServerConnectionCache.java index daebe5f4ec..75c8e6b36c 100644 --- a/memberships/src/main/java/com/salesforce/apollo/archipelago/ServerConnectionCache.java +++ b/memberships/src/main/java/com/salesforce/apollo/archipelago/ServerConnectionCache.java @@ -6,262 +6,102 @@ */ package com.salesforce.apollo.archipelago; -import java.time.Clock; -import java.time.Duration; -import 
java.time.Instant; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; -import java.util.PriorityQueue; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.locks.ReentrantLock; -import java.util.function.Supplier; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.codahale.metrics.Counter; import com.codahale.metrics.Meter; import com.codahale.metrics.Timer; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.membership.Member; - import io.grpc.ManagedChannel; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.time.Clock; +import java.time.Duration; +import java.time.Instant; +import java.util.*; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.locks.ReentrantLock; +import java.util.function.Supplier; /** - * - * Privides a safe mechanism for caching expensive connections to a server. We - * use MTLS, so we want to make good use of the ManagedChannels. Fireflies, by - * its nature, will keep some subset of connections open for gossip use, based - * on a ring. Avalanche samples a random subset of known servers. Ghost has - * access patterns based on hahes. And so on. + * Provides a safe mechanism for caching expensive connections to a server. We use MTLS, so we want to make good use of + * the ManagedChannels. Fireflies, by its nature, will keep some subset of connections open for gossip use, based on a + * ring. Avalanche samples a random subset of known servers. Ghost has access patterns based on hashes. And so on. *

    - * This cache allows grpc clients to reuse the underlying ManagedChannel as - * "Bob" inteneded, enforcing some upper limit on the connections used. + * This cache allows grpc clients to reuse the underlying ManagedChannel as "Bob" intended, enforcing some upper limit + * on the connections used. *

    - * ManagedChannels are never closed while they are open and used by a client - * stub. Connections can be opened up to some total limit, which does not have - * to be the target number of open + idle connections. ManagedChannels in the - * cache keep track of their overall usage count by client stubs - each borrow - * increments this usage count. + * ManagedChannels are never closed while they are open and used by a client stub. Connections can be opened up to some + * total limit, which does not have to be the target number of open + idle connections. ManagedChannels in the cache + * keep track of their overall usage count by client stubs - each borrow increments this usage count. *

    - * When ManagedChannels are closed, they are closed in the order of least usage - * count. ManagedChannels may also have a minimum idle duration, to prevent - * cache thrashing. When this duration is > 0, the connection will not be - * closed, potentially overshooting target cache counts - * - * @author hal.hildebrand + * When ManagedChannels are closed, they are closed in the order of least usage count. ManagedChannels may also have a + * minimum idle duration, to prevent cache thrashing. When this duration is > 0, the connection will not be closed, + * potentially overshooting target cache counts * + * @author hal.hildebrand */ public class ServerConnectionCache { - public static class Builder { - private Clock clock = Clock.systemUTC(); - private ServerConnectionFactory factory = null; - private ServerConnectionCacheMetrics metrics; - private Duration minIdle = Duration.ofMillis(100); - private int target = 10; - - public ServerConnectionCache build() { - return new ServerConnectionCache(factory, target, minIdle, clock, metrics); - } - - public Clock getClock() { - return clock; - } - - public ServerConnectionFactory getFactory() { - return factory; - } - - public ServerConnectionCacheMetrics getMetrics() { - return metrics; - } - - public Duration getMinIdle() { - return minIdle; - } - - public int getTarget() { - return target; - } - - public Builder setClock(Clock clock) { - this.clock = clock; - return this; - } - - public Builder setFactory(ServerConnectionFactory factory) { - this.factory = factory; - return this; - } - - public Builder setMetrics(ServerConnectionCacheMetrics metrics) { - this.metrics = metrics; - return this; - } - - public Builder setMinIdle(Duration minIdle) { - this.minIdle = minIdle; - return this; - } - - public Builder setTarget(int target) { - this.target = target; - return this; - } - } - - @FunctionalInterface - public interface CreateClientCommunications { - Client create(ManagedServerChannel channel); - } - - public static interface ServerConnectionCacheMetrics { - - Meter borrowRate(); - - Timer channelOpenDuration(); - - Meter closeConnectionRate(); - - Counter createConnection(); - - Meter failedConnectionRate(); - - Counter failedOpenConnection(); - - Counter openConnections(); - - Meter releaseRate(); - - } - - public interface ServerConnectionFactory { - ManagedChannel connectTo(Member to); - } - - static class ReleasableManagedChannel implements Comparable { - private final AtomicInteger borrowed = new AtomicInteger(); - private final ManagedChannel channel; - private final Instant created; - private volatile Instant lastUsed; - private final Member member; - private final ServerConnectionCache scc; - private final AtomicInteger usageCount = new AtomicInteger(); - - public ReleasableManagedChannel(Member id, ManagedChannel channel, ServerConnectionCache scc) { - this.member = id; - this.channel = channel; - this.scc = scc; - created = Instant.now(scc.clock); - lastUsed = Instant.now(scc.clock); - } - - @Override - public int compareTo(ReleasableManagedChannel o) { - return Integer.compare(usageCount.get(), o.usageCount.get()); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if ((obj == null) || (getClass() != obj.getClass())) - return false; - return member.equals(((ReleasableManagedChannel) obj).member); - } - - public ManagedChannel getChannel() { - return channel; - } - - public Member getMember() { - return member; - } - - @Override - public int hashCode() { - return member.hashCode(); - } - - 
public boolean isCloseable() { - return lastUsed.plus(scc.minIdle).isBefore(Instant.now(scc.clock)); - } - - public void release() { - scc.release(this); - } - - private boolean decrementBorrow() { - if (borrowed.decrementAndGet() == 0) { - lastUsed = Instant.now(scc.clock); - return true; - } - return false; - } - - private boolean incrementBorrow() { - usageCount.incrementAndGet(); - return borrowed.incrementAndGet() == 1; - } - } - - private final static Logger log = LoggerFactory.getLogger(ServerConnectionCache.class); - - public static Builder newBuilder() { - return new Builder(); - } - - private final Map cache = new HashMap<>(); - private final Clock clock; - private final ServerConnectionFactory factory; - private final ReentrantLock lock = new ReentrantLock(true); - private final ServerConnectionCacheMetrics metrics; - private final Duration minIdle; - private final PriorityQueue queue = new PriorityQueue<>(); - private final int target; - - public ServerConnectionCache(ServerConnectionFactory factory, int target, Duration minIdle, Clock clock, - ServerConnectionCacheMetrics metrics) { + private final static Logger log = LoggerFactory.getLogger( + ServerConnectionCache.class); + private final Map cache = new HashMap<>(); + private final Clock clock; + private final ServerConnectionFactory factory; + private final ReentrantLock lock = new ReentrantLock(true); + private final ServerConnectionCacheMetrics metrics; + private final Duration minIdle; + private final PriorityQueue queue = new PriorityQueue<>(); + private final int target; + private final Digest member; + + public ServerConnectionCache(Digest member, ServerConnectionFactory factory, int target, Duration minIdle, + Clock clock, ServerConnectionCacheMetrics metrics) { + assert member != null; this.factory = factory; this.target = Math.max(target, 1); this.minIdle = minIdle; this.clock = clock; this.metrics = metrics; + this.member = member; + } + + public static Builder newBuilder() { + return new Builder(); } public ManagedServerChannel borrow(Digest context, Member to) { return lock(() -> { if (cache.size() >= target) { - log.debug("Cache target open connections exceeded: {}, opening to {}", target, to); + log.debug("Cache target open connections exceeded: {}, opening to: {} on: {}", target, to.getId(), + member); } - ReleasableManagedChannel connection = cache.computeIfAbsent(to, member -> { - ReleasableManagedChannel conn = new ReleasableManagedChannel(to, factory.connectTo(to), this); + ReleasableManagedChannel connection = cache.computeIfAbsent(to, m -> { + log.debug("Creating new channel to: {} on: {}", to.getId(), m.getId()); + ManagedChannel channel; + try { + channel = factory.connectTo(to); + } catch (Throwable t) { + log.error("Cannot connect to: {} on: {}", to.getId(), member, t); + return null; + } + ReleasableManagedChannel conn = new ReleasableManagedChannel(to, channel, member); if (metrics != null) { metrics.createConnection().inc(); metrics.openConnections().inc(); } return conn; }); - if (connection == null) { - log.warn("Failed to open channel to {}", to); - if (metrics != null) { - metrics.failedOpenConnection().inc(); - metrics.failedConnectionRate().mark(); - } - return null; - } if (connection.incrementBorrow()) { - log.debug("Opened channel to {}, last used: {}", connection.member, connection.lastUsed); + log.debug("Increment borrow to: {} channel to: {} on: {}", connection.borrowed, + connection.member.getId(), member); if (metrics != null) { metrics.borrowRate().mark(); } 
queue.remove(connection); } - log.trace("Opened channel to {}, borrowed: {}, usage: {}", connection.member, connection.borrowed, - connection.usageCount); + log.trace("Borrowed channel to: {}, borrowed: {}, usage: {} on: {}", connection.member.getId(), + connection.borrowed, connection.usageCount, member); return new ManagedServerChannel(context, connection); }); } @@ -272,7 +112,7 @@ public T borrow(Digest context, Member to, CreateClientCommunications cre public void close() { lock(() -> { - log.info("Closing connection cache"); + log.info("Closing connection cache on: {}", member); for (ReleasableManagedChannel conn : new ArrayList<>(cache.values())) { try { conn.channel.shutdownNow(); @@ -281,7 +121,7 @@ public void close() { metrics.openConnections().dec(); } } catch (Throwable e) { - log.debug("Error closing {}", conn.member); + log.debug("Error closing connection to: {} on: {}", conn.member.getId(), member); } } cache.clear(); @@ -293,7 +133,7 @@ public void close() { public void release(ReleasableManagedChannel connection) { lock(() -> { if (connection.decrementBorrow()) { - log.debug("Releasing connection: {}", connection.member); + log.debug("Releasing connection to: {} on: {}", connection.member.getId(), member); queue.add(connection); if (metrics != null) { metrics.releaseRate().mark(); @@ -309,9 +149,9 @@ private boolean close(ReleasableManagedChannel connection) { try { connection.channel.shutdownNow(); } catch (Throwable t) { - log.debug("Error closing {}", connection.member); + log.debug("Error closing connection to: {} on: {}", connection.member.getId(), connection.member); } - log.debug("{} is closed", connection.member); + log.debug("connection to: {} is closed on: {}", connection.member.getId(), member); cache.remove(connection.member); if (metrics != null) { metrics.openConnections().dec(); @@ -333,7 +173,7 @@ private T lock(Supplier supplier) { } private void manageConnections() { -// log.info("Managing connections: " + cache.size() + " idle: " + queue.size()); + log.debug("Managing connections: {} idle: {} on: {}", cache.size(), queue.size(), member); Iterator connections = queue.iterator(); while (connections.hasNext() && cache.size() > target) { if (close(connections.next())) { @@ -341,4 +181,188 @@ private void manageConnections() { } } } + + @FunctionalInterface + public interface CreateClientCommunications { + Client create(ManagedServerChannel channel); + } + + public interface ServerConnectionCacheMetrics { + + Meter borrowRate(); + + Timer channelOpenDuration(); + + Meter closeConnectionRate(); + + Counter createConnection(); + + Meter failedConnectionRate(); + + Counter failedOpenConnection(); + + Counter openConnections(); + + Meter releaseRate(); + + } + + public interface ServerConnectionFactory { + ManagedChannel connectTo(Member to); + } + + public static class Builder implements Cloneable { + private Clock clock = Clock.systemUTC(); + private ServerConnectionFactory factory = null; + private ServerConnectionCacheMetrics metrics; + private Duration minIdle = Duration.ofMillis(100); + private int target = 10; + private Digest member; + + public ServerConnectionCache build() { + return new ServerConnectionCache(member, factory, target, minIdle, clock, metrics); + } + + @Override + public Builder clone() { + try { + return (Builder) super.clone(); + } catch (CloneNotSupportedException e) { + throw new IllegalStateException(e); + } + } + + public Clock getClock() { + return clock; + } + + public Builder setClock(Clock clock) { + this.clock = clock; + return 
this; + } + + public ServerConnectionFactory getFactory() { + return factory; + } + + public Builder setFactory(ServerConnectionFactory factory) { + this.factory = factory; + return this; + } + + public Digest getMember() { + return member; + } + + public Builder setMember(Digest member) { + this.member = member; + return this; + } + + public ServerConnectionCacheMetrics getMetrics() { + return metrics; + } + + public Builder setMetrics(ServerConnectionCacheMetrics metrics) { + this.metrics = metrics; + return this; + } + + public Duration getMinIdle() { + return minIdle; + } + + public Builder setMinIdle(Duration minIdle) { + this.minIdle = minIdle; + return this; + } + + public int getTarget() { + return target; + } + + public Builder setTarget(int target) { + this.target = target; + return this; + } + } + + class ReleasableManagedChannel implements Comparable { + private final AtomicInteger borrowed = new AtomicInteger(); + private final ManagedChannel channel; + private final Instant created; + private final Member member; + private final AtomicInteger usageCount = new AtomicInteger(); + private final Digest from; + private volatile Instant lastUsed; + + public ReleasableManagedChannel(Member member, ManagedChannel channel, Digest from) { + this.member = member; + this.channel = channel; + created = Instant.now(clock); + lastUsed = Instant.now(clock); + this.from = from; + } + + @Override + public int compareTo(ReleasableManagedChannel o) { + return Integer.compare(usageCount.get(), o.usageCount.get()); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if ((obj == null) || (getClass() != obj.getClass())) + return false; + return member.equals(((ReleasableManagedChannel) obj).member); + } + + public ManagedChannel getChannel() { + return channel; + } + + public Digest getFrom() { + return from; + } + + public Member getMember() { + return member; + } + + @Override + public int hashCode() { + return member.hashCode(); + } + + public boolean isCloseable() { + return lastUsed.plus(minIdle).isBefore(Instant.now(clock)); + } + + public void release() { + log.trace("Release connection to: {} on: {}", getMember().getId(), getFrom()); + ServerConnectionCache.this.release(this); + } + + public ManagedChannel shutdown() { + throw new IllegalStateException("Should not be called"); + } + + public ManagedChannel shutdownNow() { + throw new IllegalStateException("Should not be called"); + } + + private boolean decrementBorrow() { + if (borrowed.decrementAndGet() == 0) { + lastUsed = Instant.now(clock); + return true; + } + return false; + } + + private boolean incrementBorrow() { + usageCount.incrementAndGet(); + return borrowed.incrementAndGet() == 1; + } + } } diff --git a/memberships/src/main/java/com/salesforce/apollo/membership/CompactContext.java b/memberships/src/main/java/com/salesforce/apollo/membership/CompactContext.java index c2d3133226..2b295ec98b 100644 --- a/memberships/src/main/java/com/salesforce/apollo/membership/CompactContext.java +++ b/memberships/src/main/java/com/salesforce/apollo/membership/CompactContext.java @@ -6,31 +6,117 @@ */ package com.salesforce.apollo.membership; -import static com.salesforce.apollo.membership.Context.hashFor; -import static com.salesforce.apollo.membership.Context.minMajority; +import com.salesforce.apollo.cryptography.Digest; +import com.salesforce.apollo.cryptography.DigestAlgorithm; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Iterator; -import java.util.List; -import 
java.util.NoSuchElementException; -import java.util.TreeMap; +import java.util.*; import java.util.function.Predicate; import java.util.stream.Stream; import java.util.stream.StreamSupport; -import com.salesforce.apollo.cryptography.Digest; -import com.salesforce.apollo.cryptography.DigestAlgorithm; +import static com.salesforce.apollo.membership.Context.hashFor; +import static com.salesforce.apollo.membership.Context.minMajority; /** - * Compact context structure that mimics a context, but only tracks the digest - * ids of the members. + * Compact context structure that mimics a context, but only tracks the digest ids of the members. * * @author hal.hildebrand - * */ public class CompactContext { + private final Digest id; + private final Digest[] ids; + private final int[][] ringMap; + private final Digest[][] rings; + + public CompactContext(Digest id, int cardinality, double pByz, int bias, List ids, double epsilon) { + this(id, ids, (short) ((minMajority(pByz, cardinality, epsilon, bias) * bias) + 1)); + } + + public CompactContext(Digest id, List ids, short rings) { + this.id = id; + this.ids = new Digest[ids.size()]; + this.rings = new Digest[rings][]; + for (int j = 0; j < rings; j++) { + this.rings[j] = new Digest[ids.size()]; + } + this.ringMap = new int[rings][]; + for (int j = 0; j < rings; j++) { + this.ringMap[j] = new int[ids.size()]; + } + initialize(ids); + } + + public static Builder newBuilder() { + return new Builder(); + } + + public static Builder newBuilder(Context.Builder ctxBuilder) { + return new Builder(ctxBuilder); + } + + public int getRingCount() { + return rings.length; + } + + public List predecessors(Digest digest) { + var predecessors = new ArrayList(); + for (var i = 0; i < rings.length; i++) { + predecessors.add(new CompactRing(i).predecessor(digest)); + } + return predecessors; + } + + public List predecessors(Digest digest, Predicate test) { + var predecessors = new ArrayList(); + for (var i = 0; i < rings.length; i++) { + predecessors.add(new CompactRing(i).predecessor(digest, test)); + } + return predecessors; + } + + public CompactRing ring(int index) { + if (index < 0 || index >= rings.length) { + throw new IndexOutOfBoundsException(index); + } + return new CompactRing(index); + } + + public List successors(Digest digest) { + var successors = new ArrayList(); + for (var i = 0; i < rings.length; i++) { + successors.add(new CompactRing(i).successor(digest)); + } + return successors; + } + + public List successors(Digest digest, Predicate test) { + var successors = new ArrayList(); + for (var i = 0; i < rings.length; i++) { + successors.add(new CompactRing(i).successor(digest, test)); + } + return successors; + } + + private void initialize(List members) { + for (short i = 0; i < members.size(); i++) { + ids[i] = members.get(i); + } + Arrays.sort(ids); + for (int j = 0; j < rings.length; j++) { + var mapped = new TreeMap(); + for (short i = 0; i < ids.length; i++) { + mapped.put(hashFor(id, j, ids[i]), i); + } + short index = 0; + for (var e : mapped.entrySet()) { + rings[j][index] = e.getKey(); + ringMap[j][index] = e.getValue(); + index++; + } + } + } + public static class Builder { private int bias = 2; private double epsilon = Context.DEFAULT_EPSILON; @@ -59,42 +145,42 @@ public int getBias() { return bias; } - public double getEpsilon() { - return epsilon; - } - - public Digest getId() { - return id; - } - - public List getMembers() { - return members; - } - - public double getpByz() { - return pByz; - } - public Builder setBias(int bias) { 
this.bias = bias; return this; } + public double getEpsilon() { + return epsilon; + } + public Builder setEpsilon(double epsilon) { this.epsilon = epsilon; return this; } + public Digest getId() { + return id; + } + public Builder setId(Digest id) { this.id = id; return this; } + public List getMembers() { + return members; + } + public Builder setMembers(List members) { this.members = members; return this; } + public double getpByz() { + return pByz; + } + public Builder setpByz(double pByz) { this.pByz = pByz; return this; @@ -102,70 +188,6 @@ public Builder setpByz(double pByz) { } public class CompactRing { - private static class HeadIterator implements Iterator { - private int current; - private Digest[] ids; - private Ring ring; - private final int start; - private final Predicate test; - - private HeadIterator(int start, Predicate test) { - this.start = start; - this.test = test; - current = (start - 1) % ids.length; - } - - @Override - public boolean hasNext() { - return current != start && test.test(ring.get(current, ids)); - } - - @Override - public Digest next() { - if (current == start || test.test(ring.get(current, ids))) { - throw new NoSuchElementException(); - } - var digest = ring.get(start, ids); - current = (current + 1) % ids.length; - return digest; - } - } - - private record Ring(Digest[] keys, short[] values) { - private Digest get(int i, Digest[] ids) { - return ids[values[i]]; - } - } - - private static class TailIterator implements Iterator { - private int current; - private Digest[] ids; - private Ring ring; - private final int start; - private final Predicate test; - - private TailIterator(int start, Predicate test) { - this.start = start; - this.test = test; - current = (start - 1) % ids.length; - } - - @Override - public boolean hasNext() { - return current != start && test.test(ring.get(current, ids)); - } - - @Override - public Digest next() { - if (current == start || test.test(ring.get(current, ids))) { - throw new NoSuchElementException(); - } - var digest = ring.get(start, ids); - current = current - 1 % ids.length; - return digest; - } - } - private final int index; private CompactRing(int index) { @@ -198,8 +220,8 @@ public Stream stream() { @Override public Iterator iterator() { return new Iterator() { - private int current = 0; - private final Ring ring = ring(); + private final Ring ring = ring(); + private int current = 0; @Override public boolean hasNext() { @@ -239,9 +261,9 @@ public Iterable sucessors(Digest digest, Predicate test) { private Digest pred(Digest digest, Predicate test) { var ring = ring(); - short startIndex = (short) Arrays.binarySearch(ring.keys, digest); + short startIndex = (short) Arrays.binarySearch(ring.rehashed, digest); if (startIndex < 0) { - for (short i = (short) (ring.keys.length - 1); i >= 0; i--) { + for (short i = (short) (ring.rehashed.length - 1); i >= 0; i--) { final var tested = ring.get(i, ids); if (test.test(tested)) { return tested; @@ -255,7 +277,7 @@ private Digest pred(Digest digest, Predicate test) { return tested; } } - for (short i = (short) (ring.keys.length - 1); i > startIndex; i--) { + for (short i = (short) (ring.rehashed.length - 1); i > startIndex; i--) { final var tested = ring.get(i, ids); if (test.test(tested)) { return tested; @@ -266,7 +288,7 @@ private Digest pred(Digest digest, Predicate test) { private Iterable preds(Digest digest, Predicate test) { var ring = ring(); - short startIndex = (short) Arrays.binarySearch(ring.keys, digest); + short startIndex = (short) 
Arrays.binarySearch(ring.rehashed, digest); final var iterator = new HeadIterator(startIndex, test); return new Iterable<>() { @@ -283,9 +305,9 @@ private Ring ring() { private Digest succ(Digest digest, Predicate test) { var ring = ring(); - short startIndex = (short) Arrays.binarySearch(ring.keys, digest); + short startIndex = (short) Arrays.binarySearch(ring.rehashed, digest); if (startIndex < 0) { - for (short i = 0; i < ring.keys.length; i++) { + for (short i = 0; i < ring.rehashed.length; i++) { final var tested = ring.get(i, ids); if (test.test(tested)) { return tested; @@ -293,7 +315,7 @@ private Digest succ(Digest digest, Predicate test) { } return null; } - for (short i = (short) (startIndex + 1); i < ring.keys.length; i++) { + for (short i = (short) (startIndex + 1); i < ring.rehashed.length; i++) { final var tested = ring.get(i, ids); if (test.test(tested)) { return tested; @@ -310,7 +332,7 @@ private Digest succ(Digest digest, Predicate test) { private Iterable succs(Digest digest, Predicate test) { var ring = ring(); - short startIndex = (short) Arrays.binarySearch(ring.keys, digest); + short startIndex = (short) Arrays.binarySearch(ring.rehashed, digest); final var iterator = new TailIterator(startIndex, test); return new Iterable<>() { @@ -320,97 +342,69 @@ public Iterator iterator() { } }; } - } - public static Builder newBuilder() { - return new Builder(); - } - - public static Builder newBuilder(Context.Builder ctxBuilder) { - return new Builder(ctxBuilder); - } - - private final Digest id; - private final Digest[] ids; - private final short[][] ringMap; - private final Digest[][] rings; - - public CompactContext(Digest id, int cardinality, double pByz, int bias, List ids, double epsilon) { - this(id, ids, (short) ((minMajority(pByz, cardinality, epsilon, bias) * bias) + 1)); - } - - public CompactContext(Digest id, List ids, short rings) { - this.id = id; - this.ids = new Digest[ids.size()]; - this.rings = new Digest[rings][]; - for (int j = 0; j < rings; j++) { - this.rings[j] = new Digest[ids.size()]; - } - this.ringMap = new short[rings][]; - for (int j = 0; j < rings; j++) { - this.ringMap[j] = new short[ids.size()]; - } - initialize(ids); - } + private static class HeadIterator implements Iterator { + private final int start; + private final Predicate test; + private int current; + private Digest[] ids; + private Ring ring; - public int getRingCount() { - return rings.length; - } + private HeadIterator(int start, Predicate test) { + this.start = start; + this.test = test; + current = (start - 1) % ids.length; + } - public List predecessors(Digest digest) { - var predecessors = new ArrayList(); - for (var i = 0; i < rings.length; i++) { - predecessors.add(new CompactRing(i).predecessor(digest)); - } - return predecessors; - } + @Override + public boolean hasNext() { + return current != start && test.test(ring.get(current, ids)); + } - public List predecessors(Digest digest, Predicate test) { - var predecessors = new ArrayList(); - for (var i = 0; i < rings.length; i++) { - predecessors.add(new CompactRing(i).predecessor(digest, test)); + @Override + public Digest next() { + if (current == start || test.test(ring.get(current, ids))) { + throw new NoSuchElementException(); + } + var digest = ring.get(start, ids); + current = (current + 1) % ids.length; + return digest; + } } - return predecessors; - } - public CompactRing ring(int index) { - if (index < 0 || index >= rings.length) { - throw new IndexOutOfBoundsException(index); + // A Ring is a list of rehashed ids 
and a map from these ids to the original id + private record Ring(Digest[] rehashed, int[] mapping) { + private Digest get(int i, Digest[] ids) { + return ids[mapping[i]]; + } } - return new CompactRing(index); - } - public List successors(Digest digest) { - var successors = new ArrayList(); - for (var i = 0; i < rings.length; i++) { - successors.add(new CompactRing(i).successor(digest)); - } - return successors; - } + private static class TailIterator implements Iterator { + private final int start; + private final Predicate test; + private int current; + private Digest[] ids; + private Ring ring; - public List successors(Digest digest, Predicate test) { - var successors = new ArrayList(); - for (var i = 0; i < rings.length; i++) { - successors.add(new CompactRing(i).successor(digest, test)); - } - return successors; - } + private TailIterator(int start, Predicate test) { + this.start = start; + this.test = test; + current = (start - 1) % ids.length; + } - private void initialize(List members) { - for (short i = 0; i < members.size(); i++) { - ids[i] = members.get(i); - } - Arrays.sort(ids); - for (int j = 0; j < rings.length; j++) { - var mapped = new TreeMap(); - for (short i = 0; i < ids.length; i++) { - mapped.put(hashFor(id, j, ids[i]), i); + @Override + public boolean hasNext() { + return current != start && test.test(ring.get(current, ids)); } - short index = 0; - for (var e : mapped.entrySet()) { - rings[j][index] = e.getKey(); - ringMap[j][index] = e.getValue(); - index++; + + @Override + public Digest next() { + if (current == start || test.test(ring.get(current, ids))) { + throw new NoSuchElementException(); + } + var digest = ring.get(start, ids); + current = current - 1 % ids.length; + return digest; } } } diff --git a/memberships/src/main/java/com/salesforce/apollo/membership/Context.java b/memberships/src/main/java/com/salesforce/apollo/membership/Context.java index fd872d73fe..9887c7895c 100644 --- a/memberships/src/main/java/com/salesforce/apollo/membership/Context.java +++ b/memberships/src/main/java/com/salesforce/apollo/membership/Context.java @@ -6,111 +6,26 @@ */ package com.salesforce.apollo.membership; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.UUID; -import java.util.function.Predicate; -import java.util.stream.Stream; - -import org.apache.commons.math3.random.BitsStreamGenerator; - import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; +import org.apache.commons.math3.random.BitsStreamGenerator; + +import java.util.*; +import java.util.function.Predicate; +import java.util.stream.Stream; /** - * Provides a Context for Membership and is uniquely identified by a Digest;. - * Members may be either active or offline. The Context maintains a number of - * Rings (may be zero) that the Context provides for Firefly type consistent - * hash ring ordering operators. Each ring has a unique hash of each individual - * member, and thus each ring has a different ring order of the same membership - * set. Hashes for Context level operators include the ID of the ring. Hashes - * computed and cached for each member, per ring include the ID of the enclosing - * Context. - * - * @author hal.hildebrand + * Provides a Context for Membership and is uniquely identified by a Digest;. Members may be either active or offline. 
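A simplified, illustrative sketch (not project code) of the ring-construction pattern visible in the CompactContext hunks above: each ring re-hashes every member id, keeps the re-hashes sorted, and records the index back into the canonical id array, matching the new Ring(rehashed, mapping) record. The String-based hashFor stand-in below is hypothetical; the project hashes Digests and folds the context id and ring number into each hash.

import java.util.TreeMap;

public class RingBuildSketch {
    public static void main(String[] args) {
        String[] ids = { "alpha", "bravo", "charlie", "delta" }; // canonical ids, already sorted
        int ring = 2;                                            // ring index folded into each hash

        // sort members by their per-ring hash, remembering the original index
        var byHash = new TreeMap<String, Integer>();
        for (int i = 0; i < ids.length; i++) {
            byHash.put(hashFor(ring, ids[i]), i);
        }

        // flatten into the two parallel arrays a compact Ring keeps
        String[] rehashed = new String[ids.length];
        int[] mapping = new int[ids.length];
        int index = 0;
        for (var e : byHash.entrySet()) { // TreeMap iterates in ascending hash order
            rehashed[index] = e.getKey();
            mapping[index] = e.getValue();
            index++;
        }
        for (int i = 0; i < rehashed.length; i++) {
            System.out.println(rehashed[i] + " -> " + ids[mapping[i]]);
        }
    }

    // hypothetical stand-in for Context.hashFor(ctxId, ring, d), which hashes Digests in the project
    private static String hashFor(int ring, String id) {
        return Integer.toHexString((ring + ":" + id).hashCode());
    }
}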
+ * The Context maintains a number of Rings (may be zero) that the Context provides for Firefly type consistent hash ring + * ordering operators. Each ring has a unique hash of each individual member, and thus each ring has a different ring + * order of the same membership set. Hashes for Context level operators include the ID of the ring. Hashes computed and + * cached for each member, per ring include the ID of the enclosing Context. * + * @author hal.hildebrand */ public interface Context { - abstract class Builder { - protected int bias = 2; - protected int cardinality; - protected double epsilon = DEFAULT_EPSILON; - protected Digest id = DigestAlgorithm.DEFAULT.getOrigin(); - protected double pByz = 0.1; // 10% chance any node is out to get ya - - public abstract Context build(); - - public int getBias() { - return bias; - } - - public int getCardinality() { - return cardinality; - } - - public double getEpsilon() { - return epsilon; - } - - public Digest getId() { - return id; - } - - public double getpByz() { - return pByz; - } - - public Builder setBias(int bias) { - this.bias = bias; - return this; - } - - public Builder setCardinality(int cardinality) { - this.cardinality = cardinality; - return this; - } - - public Builder setEpsilon(double epsilon) { - this.epsilon = epsilon; - return this; - } - - public Builder setId(Digest id) { - this.id = id; - return this; - } - - public Builder setpByz(double pByz) { - this.pByz = pByz; - return this; - } - } - - interface MembershipListener { - - /** - * A new member has recovered and is now active - * - * @param member - */ - default void active(T member) { - } - - /** - * A member is offline - * - * @param member - */ - default void offline(T member) { - } - } - double DEFAULT_EPSILON = 0.99999; - static final String RING_HASH_TEMPLATE = "%s-%s-%s"; static Digest hashFor(Digest ctxId, int ring, Digest d) { @@ -134,9 +49,8 @@ static int minMajority(double pByz, int card, double epsilon) { } /** - * @return the minimum t such that the probability of more than t out of bias * - * t+1 monitors are correct with probability e/size given the uniform - * probability pByz that a monitor is Byzantine. + * @return the minimum t such that the probability of more than t out of bias * t+1 monitors are correct with + * probability e/size given the uniform probability pByz that a monitor is Byzantine. */ static int minMajority(double pByz, int cardinality, double epsilon, int bias) { if (epsilon > 1.0 || epsilon <= 0.0) { @@ -149,19 +63,19 @@ static int minMajority(double pByz, int cardinality, double epsilon, int bias) { if (cardinality >= (bias * t) + 1) { return t; } else { - throw new IllegalArgumentException("Cardinality: " + cardinality - + " cannot support required tolerance: " + t); + throw new IllegalArgumentException( + "Cardinality: " + cardinality + " cannot support required tolerance: " + t); } } } - throw new IllegalArgumentException("Cannot compute number of rings from bias=" + bias + " pByz=" + pByz - + " cardinality: " + cardinality + " epsilon: " + epsilon); + throw new IllegalArgumentException( + "Cannot compute number of rings from bias=" + bias + " pByz=" + pByz + " cardinality: " + cardinality + + " epsilon: " + epsilon); } /** - * @return the minimum t such that the probability of more than t out of 2t+1 - * monitors are correct with probability e/size given the uniform - * probability pByz that a monitor is Byzantine. 
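To make the relationship between these parameters concrete, a hedged usage sketch: Context.minMajority(...) yields the Byzantine tolerance t, and the CompactContext constructor shown earlier derives its ring count as (t * bias) + 1. The concrete numbers are illustrative assumptions; minMajority throws IllegalArgumentException when the cardinality cannot support the required tolerance, so small contexts may need different values.

import com.salesforce.apollo.membership.Context;

public class RingCountSketch {
    public static void main(String[] args) {
        double pByz = 0.1;                        // Builder default: 10% chance any member is Byzantine
        int cardinality = 100;                    // illustrative context size
        double epsilon = Context.DEFAULT_EPSILON; // 0.99999
        int bias = 2;                             // Builder default

        int t = Context.minMajority(pByz, cardinality, epsilon, bias);
        int rings = (t * bias) + 1;               // same expression the CompactContext constructor uses
        System.out.println("tolerance t = " + t + ", rings = " + rings);
        // Note the inverse in toleranceLevel(): (rings - 1) / bias recovers t.
    }
}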
+ * @return the minimum t such that the probability of more than t out of 2t+1 monitors are correct with probability + * e/size given the uniform probability pByz that a monitor is Byzantine. */ static int minMajority(int bias, double pByz, int cardinality) { return minMajority(pByz, cardinality, 0.99999, bias); @@ -200,9 +114,19 @@ static List uniqueSuccessors(final Context context, Digest diges /** * Mark a member as active in the context + * + * @return true if the member was previously inactive, false if currently active */ boolean activate(T m); + /** + * Mark a member identified by the digest ID as active in the context + * + * @return true if the member was previously inactive, false if currently active + * @throws NoSuchElementException - if no member is found in the context with the supplied ID + */ + boolean activate(Digest id); + /** * Mark a member as active in the context */ @@ -261,8 +185,8 @@ static List uniqueSuccessors(final Context context, Digest diges void deregister(UUID id); /** - * Answer the aproximate diameter of the receiver, assuming the rings were built - * with FF parameters, with the rings forming random graph connections segments. + * Answer the aproximate diameter of the receiver, assuming the rings were built with FF parameters, with the rings + * forming random graph connections segments. */ int diameter(); @@ -277,8 +201,8 @@ static List uniqueSuccessors(final Context context, Digest diges List getAllMembers(); /** - * Answer the bias of the context. The bias is the multiple of the number of - * byzantine members the context is designed to foil + * Answer the bias of the context. The bias is the multiple of the number of byzantine members the context is + * designed to foil */ int getBias(); @@ -324,8 +248,7 @@ static List uniqueSuccessors(final Context context, Digest diges boolean isActive(T m); /** - * Answer true if a member who's id is the supplied digest is a member of the - * view + * Answer true if a member who's id is the supplied digest is a member of the view */ boolean isMember(Digest digest); @@ -346,7 +269,7 @@ static List uniqueSuccessors(final Context context, Digest diges /** * Answer true if the member is a successor of the supplied digest on any ring - * + * * @param member * @param digest * @return @@ -354,8 +277,7 @@ static List uniqueSuccessors(final Context context, Digest diges boolean isSuccessorOf(T m, Digest digest); /** - * Answer the majority cardinality of the context, based on the current ring - * count + * Answer the majority cardinality of the context, based on the current ring count */ int majority(); @@ -371,7 +293,7 @@ static List uniqueSuccessors(final Context context, Digest diges /** * Take a member offline - * + * * @return true if the member was active previously */ boolean offline(T m); @@ -389,8 +311,7 @@ static List uniqueSuccessors(final Context context, Digest diges List predecessors(Digest key); /** - * @return the predecessor on each ring for the provided key that pass the - * provided predicate + * @return the predecessor on each ring for the provided key that pass the provided predicate */ List predecessors(Digest key, Predicate test); @@ -400,8 +321,7 @@ static List uniqueSuccessors(final Context context, Digest diges List predecessors(T key); /** - * @return the predecessor on each ring for the provided key that pass the - * provided predicate + * @return the predecessor on each ring for the provided key that pass the provided predicate */ List predecessors(T key, Predicate test); @@ -446,16 +366,24 @@ 
static List uniqueSuccessors(final Context context, Digest diges Stream> rings(); /** - * Answer a random sample of at least range size from the active members of the - * context - * + * Answer a random sample of at least range size from the active members of the context + * + * @param range - the desired range + * @param entropy - source o randomness + * @param exc - the member to exclude from sample + * @return a random sample set of the view's live members. May be limited by the number of active members. + */ + List sample(int range, BitsStreamGenerator entropy, Digest exc); + + /** + * Answer a random sample of at least range size from the active members of the context + * * @param range - the desired range * @param entropy - source o randomness * @param excluded - the member to exclude from sample - * @return a random sample set of the view's live members. May be limited by the - * number of active members. + * @return a random sample set of the view's live members. May be limited by the number of active members. */ - List sample(int range, BitsStreamGenerator entropy, Digest exc); + List sample(int range, BitsStreamGenerator entropy, Predicate excluded); /** * Answer the total count of active and offline members of this context @@ -468,8 +396,7 @@ static List uniqueSuccessors(final Context context, Digest diges List successors(Digest key); /** - * @return the list of successor to the key on each ring that pass the provided - * predicate test + * @return the list of successor to the key on each ring that pass the provided predicate test */ List successors(Digest key, Predicate test); @@ -479,20 +406,19 @@ static List uniqueSuccessors(final Context context, Digest diges List successors(T key); /** - * @return the list of successor to the key on each ring that pass the provided - * predicate test + * @return the list of successor to the key on each ring that pass the provided predicate test */ List successors(T key, Predicate test); /** - * The number of iterations until a given message has been distributed to all - * members in the context, using the rings of the receiver as a gossip graph + * The number of iterations until a given message has been distributed to all members in the context, using the + * rings of the receiver as a gossip graph */ int timeToLive(); /** - * Answer the tolerance level of the context to byzantine members, assuming this - * context has been constructed from FF parameters + * Answer the tolerance level of the context to byzantine members, assuming this context has been constructed from + * FF parameters */ int toleranceLevel(); @@ -503,4 +429,78 @@ static List uniqueSuccessors(final Context context, Digest diges boolean validRing(int ring); + interface MembershipListener { + + /** + * A new member has recovered and is now active + * + * @param member + */ + default void active(T member) { + } + + /** + * A member is offline + * + * @param member + */ + default void offline(T member) { + } + } + + abstract class Builder { + protected int bias = 2; + protected int cardinality; + protected double epsilon = DEFAULT_EPSILON; + protected Digest id = DigestAlgorithm.DEFAULT.getOrigin(); + protected double pByz = 0.1; // 10% chance any node is out to get ya + + public abstract Context build(); + + public int getBias() { + return bias; + } + + public Builder setBias(int bias) { + this.bias = bias; + return this; + } + + public int getCardinality() { + return cardinality; + } + + public Builder setCardinality(int cardinality) { + this.cardinality = cardinality; + 
return this; + } + + public double getEpsilon() { + return epsilon; + } + + public Builder setEpsilon(double epsilon) { + this.epsilon = epsilon; + return this; + } + + public Digest getId() { + return id; + } + + public Builder setId(Digest id) { + this.id = id; + return this; + } + + public double getpByz() { + return pByz; + } + + public Builder setpByz(double pByz) { + this.pByz = pByz; + return this; + } + } + } diff --git a/memberships/src/main/java/com/salesforce/apollo/membership/ContextImpl.java b/memberships/src/main/java/com/salesforce/apollo/membership/ContextImpl.java index f0ad2c7c0a..acdfbc37f3 100644 --- a/memberships/src/main/java/com/salesforce/apollo/membership/ContextImpl.java +++ b/memberships/src/main/java/com/salesforce/apollo/membership/ContextImpl.java @@ -6,14 +6,12 @@ */ package com.salesforce.apollo.membership; -import static com.salesforce.apollo.membership.Context.minMajority; +import com.salesforce.apollo.cryptography.Digest; +import org.apache.commons.math3.random.BitsStreamGenerator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import java.util.UUID; +import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentSkipListMap; import java.util.concurrent.atomic.AtomicBoolean; @@ -21,94 +19,29 @@ import java.util.function.Supplier; import java.util.stream.Stream; -import org.apache.commons.math3.random.BitsStreamGenerator; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.salesforce.apollo.cryptography.Digest; +import static com.salesforce.apollo.membership.Context.minMajority; /** - * Provides a Context for Membership and is uniquely identified by a Digest;. - * Members may be either active or offline. The Context maintains a number of - * Rings (may be zero) that the Context provides for Firefly type consistent - * hash ring ordering operators. Each ring has a unique hash of each individual - * member, and thus each ring has a different ring order of the same membership - * set. Hashes for Context level operators include the ID of the ring. Hashes - * computed for each member, per ring include the ID of the enclosing Context. + * Provides a Context for Membership and is uniquely identified by a Digest;. Members may be either active or offline. + * The Context maintains a number of Rings (may be zero) that the Context provides for Firefly type consistent hash ring + * ordering operators. Each ring has a unique hash of each individual member, and thus each ring has a different ring + * order of the same membership set. Hashes for Context level operators include the ID of the ring. Hashes computed for + * each member, per ring include the ID of the enclosing Context. 
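A hedged usage sketch of the Predicate-based sample(...) overload added to the interface above, which generalizes the single-Digest exclusion: callers can now exclude an arbitrary set of members. Context<Member>, `self`, and `alreadyContacted` are illustrative assumptions; MersenneTwister is one concrete commons-math BitsStreamGenerator.

import com.salesforce.apollo.membership.Context;
import com.salesforce.apollo.membership.Member;
import org.apache.commons.math3.random.MersenneTwister;

import java.util.List;
import java.util.Set;

class SampleSketch {
    // sample up to three random active members, skipping ourselves and anyone already contacted;
    // the result may be smaller than requested when fewer members are active
    static List<Member> gossipTargets(Context<Member> context, Member self, Set<Member> alreadyContacted) {
        var entropy = new MersenneTwister();
        return context.sample(3, entropy, m -> m.equals(self) || alreadyContacted.contains(m));
    }
}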
* * @author hal.hildebrand - * */ public class ContextImpl implements Context { - public static class Tracked { - private static final Logger log = LoggerFactory.getLogger(Tracked.class); - - private final AtomicBoolean active = new AtomicBoolean(false); - private Digest[] hashes; - private final M member; - - public Tracked(M member, Supplier hashes) { - this.member = member; - this.hashes = hashes.get(); - } - - public boolean activate() { - var activated = active.compareAndSet(false, true); - if (activated) { - log.trace("Activated: {}", member.getId()); - } - return activated; - } - - public Digest hash(int index) { - return hashes[index]; - } - - public boolean isActive() { - return active.get(); - } - - public M member() { - return member; - } - - public boolean offline() { - var offlined = active.compareAndSet(true, false); - if (offlined) { - log.trace("Offlined: {}", member.getId()); - } - return offlined; - } - - @Override - public String toString() { - return String.format("%s:%s %s", member, active.get(), Arrays.asList(hashes)); - } - - private void rebalance(int ringCount, ContextImpl contextImpl) { - final var newHashes = new Digest[ringCount]; - for (int i = 0; i < Math.min(ringCount, hashes.length); i++) { - newHashes[i] = hashes[i]; - } - for (int i = Math.min(ringCount, hashes.length); i < newHashes.length; i++) { - newHashes[i] = contextImpl.hashFor(member.getId(), i); - } - hashes = newHashes; - } - } - - private static final Logger log = LoggerFactory.getLogger(Context.class); - - private final int bias; - private volatile int cardinality; - private final double epsilon; - private final Digest id; - private final Map> members = new ConcurrentSkipListMap<>(); - private final Map> membershipListeners = new ConcurrentHashMap<>(); - private final double pByz; - - private final List> rings = new ArrayList<>(); + private static final Logger log = LoggerFactory.getLogger( + Context.class); + private final int bias; + private final double epsilon; + private final Digest id; + private final Map> members = new ConcurrentSkipListMap<>(); + private final Map> membershipListeners = new ConcurrentHashMap<>(); + private final double pByz; + private final List> rings = new ArrayList<>(); + private volatile int cardinality; public ContextImpl(Digest id, int cardinality, double pbyz, int bias) { this(id, cardinality, pbyz, bias, DEFAULT_EPSILON); @@ -148,6 +81,29 @@ public boolean activate(T m) { return false; } + /** + * Mark a member identified by the digest ID as active in the context + * + * @return true if the member was previously inactive, false if currently active + */ + public boolean activate(Digest id) { + var m = members.get(id); + if (m == null) { + throw new NoSuchElementException("No member known: " + id); + } + if (m.activate()) { + membershipListeners.values().stream().forEach(l -> { + try { + l.active(m.member); + } catch (Throwable e) { + log.error("error recovering member in listener: " + l, e); + } + }); + return true; + } + return false; + } + /** * Mark a member as active in the context */ @@ -159,7 +115,7 @@ public boolean activateIfMember(T m) { try { l.active(m); } catch (Throwable e) { - log.error("error recoving member in listener: " + l, e); + log.error("error recovering member in listener: " + l, e); } }); return true; @@ -236,8 +192,8 @@ public void deregister(UUID id) { } /** - * Answer the aproximate diameter of the receiver, assuming the rings were built - * with FF parameters, with the rings forming random graph connections segments. 
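A small, hedged sketch of the new activate(Digest) overload: unlike activate(T), the ContextImpl implementation below throws NoSuchElementException for an unknown id, so callers that only hold a digest may want to guard for it. Context<Member> and the helper name are illustrative assumptions.

import com.salesforce.apollo.cryptography.Digest;
import com.salesforce.apollo.membership.Context;
import com.salesforce.apollo.membership.Member;

import java.util.NoSuchElementException;

class ActivateSketch {
    static boolean tryActivate(Context<Member> context, Digest memberId) {
        try {
            return context.activate(memberId); // true only if the member was previously inactive
        } catch (NoSuchElementException e) {
            return false;                      // no member with this id is registered in the context
        }
    }
}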
+ * Answer the aproximate diameter of the receiver, assuming the rings were built with FF parameters, with the rings + * forming random graph connections segments. */ @Override public int diameter() { @@ -444,8 +400,7 @@ public List predecessors(Digest key) { } /** - * @return the predecessor on each ring for the provided key that pass the - * provided predicate + * @return the predecessor on each ring for the provided key that pass the provided predicate */ @Override public List predecessors(Digest key, Predicate test) { @@ -468,8 +423,7 @@ public List predecessors(T key) { } /** - * @return the predecessor on each ring for the provided key that pass the - * provided predicate + * @return the predecessor on each ring for the provided key that pass the provided predicate */ @Override public List predecessors(T key, Predicate test) { @@ -567,18 +521,31 @@ public Stream> rings() { } /** - * Answer a random sample of at least range size from the active members of the - * context + * Answer a random sample of at least range size from the active members of the context * * @param range - the desired range * @param entropy - source o randomness - * @param excluded - the member to exclude from sample - * @return a random sample set of the view's live members. May be limited by the - * number of active members. + * @param excluded - predicate to test for exclusion + * @return a random sample set of the view's live members. May be limited by the number of active members. + */ + @Override + public List sample(int range, BitsStreamGenerator entropy, Predicate excluded) { + return rings.get(entropy.nextInt(rings.size())) + .stream() + .collect(new ReservoirSampler(excluded, range, entropy)); + } + + /** + * Answer a random sample of at least range size from the active members of the context + * + * @param range - the desired range + * @param entropy - source o randomness + * @param exc - the member to exclude from sample + * @return a random sample set of the view's live members. May be limited by the number of active members. */ @Override public List sample(int range, BitsStreamGenerator entropy, Digest exc) { - Member excluded = getMember(exc); + Member excluded = exc == null ? 
null : getMember(exc); return rings.get(entropy.nextInt(rings.size())) .stream() .collect(new ReservoirSampler(excluded, range, entropy)); @@ -598,8 +565,7 @@ public List successors(Digest key) { } /** - * @return the list of successor to the key on each ring that pass the provided - * predicate test + * @return the list of successor to the key on each ring that pass the provided predicate test */ @Override public List successors(Digest key, Predicate test) { @@ -622,8 +588,7 @@ public List successors(T key) { } /** - * @return the list of successor to the key on each ring that pass the provided - * predicate test + * @return the list of successor to the key on each ring that pass the provided predicate test */ @Override public List successors(T key, Predicate test) { @@ -638,28 +603,28 @@ public List successors(T key, Predicate test) { } /** - * The number of iterations until a given message has been distributed to all - * members in the context, using the rings of the receiver as a gossip graph + * The number of iterations until a given message has been distributed to all members in the context, using the + * rings of the receiver as a gossip graph */ @Override public int timeToLive() { return (rings.size() * diameter()) + 1; } + @Override + public String toString() { + return "Context [" + id + "]"; + } + /** - * Answer the tolerance level of the context to byzantine members, assuming this - * context has been constructed from FF parameters + * Answer the tolerance level of the context to byzantine members, assuming this context has been constructed from + * FF parameters */ @Override public int toleranceLevel() { return (rings.size() - 1) / bias; } - @Override - public String toString() { - return "Context [" + id + "]"; - } - @Override public int totalCount() { return members.size(); @@ -688,4 +653,61 @@ private ContextImpl.Tracked tracking(T m) { }); return tracking; } + + public static class Tracked { + private static final Logger log = LoggerFactory.getLogger(Tracked.class); + + private final AtomicBoolean active = new AtomicBoolean(false); + private final M member; + private Digest[] hashes; + + public Tracked(M member, Supplier hashes) { + this.member = member; + this.hashes = hashes.get(); + } + + public boolean activate() { + var activated = active.compareAndSet(false, true); + if (activated) { + log.trace("Activated: {}", member.getId()); + } + return activated; + } + + public Digest hash(int index) { + return hashes[index]; + } + + public boolean isActive() { + return active.get(); + } + + public M member() { + return member; + } + + public boolean offline() { + var offlined = active.compareAndSet(true, false); + if (offlined) { + log.trace("Offlined: {}", member.getId()); + } + return offlined; + } + + @Override + public String toString() { + return String.format("%s:%s %s", member, active.get(), Arrays.asList(hashes)); + } + + private void rebalance(int ringCount, ContextImpl contextImpl) { + final var newHashes = new Digest[ringCount]; + for (int i = 0; i < Math.min(ringCount, hashes.length); i++) { + newHashes[i] = hashes[i]; + } + for (int i = Math.min(ringCount, hashes.length); i < newHashes.length; i++) { + newHashes[i] = contextImpl.hashFor(member.getId(), i); + } + hashes = newHashes; + } + } } diff --git a/memberships/src/main/java/com/salesforce/apollo/membership/ReservoirSampler.java b/memberships/src/main/java/com/salesforce/apollo/membership/ReservoirSampler.java index feb122fc73..d62dde510a 100644 --- 
a/memberships/src/main/java/com/salesforce/apollo/membership/ReservoirSampler.java +++ b/memberships/src/main/java/com/salesforce/apollo/membership/ReservoirSampler.java @@ -6,31 +6,32 @@ */ package com.salesforce.apollo.membership; +import org.apache.commons.math3.random.BitsStreamGenerator; + import java.util.ArrayList; import java.util.EnumSet; import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.BiConsumer; -import java.util.function.BinaryOperator; -import java.util.function.Function; -import java.util.function.Supplier; +import java.util.function.*; import java.util.stream.Collector; -import org.apache.commons.math3.random.BitsStreamGenerator; - public class ReservoirSampler implements Collector, List> { - private AtomicInteger c = new AtomicInteger(); - private final Object exclude; + private final Predicate exclude; private final BitsStreamGenerator rand; private final int sz; + private AtomicInteger c = new AtomicInteger(); public ReservoirSampler(int size, BitsStreamGenerator entropy) { this(null, size, entropy); } public ReservoirSampler(Object excluded, int size, BitsStreamGenerator entropy) { + this(t -> excluded == null ? false : excluded.equals(t), size, entropy); + } + + public ReservoirSampler(Predicate excluded, int size, BitsStreamGenerator entropy) { assert size >= 0; this.exclude = excluded; this.sz = size; @@ -66,7 +67,7 @@ public Supplier> supplier() { } private void addIt(final List in, T s) { - if (exclude != null && exclude.equals(s)) { + if (exclude != null && exclude.test(s)) { return; } if (in.size() < sz) { diff --git a/memberships/src/main/java/com/salesforce/apollo/membership/messaging/rbc/ReliableBroadcaster.java b/memberships/src/main/java/com/salesforce/apollo/membership/messaging/rbc/ReliableBroadcaster.java index b93c4c77bd..de5ae9bb4c 100644 --- a/memberships/src/main/java/com/salesforce/apollo/membership/messaging/rbc/ReliableBroadcaster.java +++ b/memberships/src/main/java/com/salesforce/apollo/membership/messaging/rbc/ReliableBroadcaster.java @@ -10,8 +10,6 @@ import com.google.protobuf.ByteString; import com.google.protobuf.InvalidProtocolBufferException; import com.google.protobuf.Message; -import com.salesfoce.apollo.cryptography.proto.Biff; -import com.salesfoce.apollo.messaging.proto.*; import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.archipelago.RouterImpl.CommonCommunications; import com.salesforce.apollo.bloomFilters.BloomFilter; @@ -20,13 +18,16 @@ import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.cryptography.JohnHancock; +import com.salesforce.apollo.cryptography.proto.Biff; import com.salesforce.apollo.membership.Context; import com.salesforce.apollo.membership.Member; import com.salesforce.apollo.membership.SigningMember; import com.salesforce.apollo.membership.messaging.rbc.comms.RbcServer; import com.salesforce.apollo.membership.messaging.rbc.comms.ReliableBroadcast; +import com.salesforce.apollo.messaging.proto.*; import com.salesforce.apollo.ring.RingCommunications; import com.salesforce.apollo.utils.Entropy; +import com.salesforce.apollo.utils.Utils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -187,7 +188,8 @@ public void start(Duration duration) { log.info("Starting Reliable Broadcaster[{}] for {}", context.getId(), member.getId()); comm.register(context.getId(), new Service()); var scheduler = Executors.newScheduledThreadPool(2, 
Thread.ofVirtual().factory()); - scheduler.schedule(() -> oneRound(duration, scheduler), initialDelay, TimeUnit.MILLISECONDS); + scheduler.schedule(() -> Thread.ofVirtual().start(Utils.wrapped(() -> oneRound(duration, scheduler), log)), + initialDelay, TimeUnit.MILLISECONDS); } public void stop() { @@ -251,7 +253,9 @@ private void handle(Optional result, } if (started.get()) { try { - scheduler.schedule(() -> oneRound(duration, scheduler), duration.toMillis(), TimeUnit.MILLISECONDS); + scheduler.schedule( + () -> Thread.ofVirtual().start(Utils.wrapped(() -> oneRound(duration, scheduler), log)), + duration.toMillis(), TimeUnit.MILLISECONDS); } catch (RejectedExecutionException e) { return; } diff --git a/memberships/src/main/java/com/salesforce/apollo/membership/messaging/rbc/comms/RbcClient.java b/memberships/src/main/java/com/salesforce/apollo/membership/messaging/rbc/comms/RbcClient.java index 27ec7f85bf..d49ae2cb08 100644 --- a/memberships/src/main/java/com/salesforce/apollo/membership/messaging/rbc/comms/RbcClient.java +++ b/memberships/src/main/java/com/salesforce/apollo/membership/messaging/rbc/comms/RbcClient.java @@ -7,10 +7,10 @@ package com.salesforce.apollo.membership.messaging.rbc.comms; import com.codahale.metrics.Timer.Context; -import com.salesfoce.apollo.messaging.proto.MessageBff; -import com.salesfoce.apollo.messaging.proto.RBCGrpc; -import com.salesfoce.apollo.messaging.proto.Reconcile; -import com.salesfoce.apollo.messaging.proto.ReconcileContext; +import com.salesforce.apollo.messaging.proto.MessageBff; +import com.salesforce.apollo.messaging.proto.RBCGrpc; +import com.salesforce.apollo.messaging.proto.Reconcile; +import com.salesforce.apollo.messaging.proto.ReconcileContext; import com.salesforce.apollo.archipelago.ManagedServerChannel; import com.salesforce.apollo.archipelago.ServerConnectionCache.CreateClientCommunications; import com.salesforce.apollo.membership.Member; @@ -22,9 +22,9 @@ */ public class RbcClient implements ReliableBroadcast { - private final ManagedServerChannel channel; + private final ManagedServerChannel channel; private final RBCGrpc.RBCBlockingStub client; - private final RbcMetrics metrics; + private final RbcMetrics metrics; public RbcClient(ManagedServerChannel c, RbcMetrics metrics) { this.channel = c; diff --git a/memberships/src/main/java/com/salesforce/apollo/membership/messaging/rbc/comms/RbcServer.java b/memberships/src/main/java/com/salesforce/apollo/membership/messaging/rbc/comms/RbcServer.java index 09959de274..b361fac693 100644 --- a/memberships/src/main/java/com/salesforce/apollo/membership/messaging/rbc/comms/RbcServer.java +++ b/memberships/src/main/java/com/salesforce/apollo/membership/messaging/rbc/comms/RbcServer.java @@ -8,10 +8,10 @@ import com.codahale.metrics.Timer.Context; import com.google.protobuf.Empty; -import com.salesfoce.apollo.messaging.proto.MessageBff; -import com.salesfoce.apollo.messaging.proto.RBCGrpc.RBCImplBase; -import com.salesfoce.apollo.messaging.proto.Reconcile; -import com.salesfoce.apollo.messaging.proto.ReconcileContext; +import com.salesforce.apollo.messaging.proto.MessageBff; +import com.salesforce.apollo.messaging.proto.RBCGrpc.RBCImplBase; +import com.salesforce.apollo.messaging.proto.Reconcile; +import com.salesforce.apollo.messaging.proto.ReconcileContext; import com.salesforce.apollo.archipelago.RoutableService; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.membership.messaging.rbc.RbcMetrics; @@ -22,12 +22,11 @@ /** * @author hal.hildebrand - * */ public 
class RbcServer extends RBCImplBase { - private ClientIdentity identity; private final RbcMetrics metrics; private final RoutableService routing; + private ClientIdentity identity; public RbcServer(ClientIdentity identity, RbcMetrics metrics, RoutableService r) { this.metrics = metrics; diff --git a/memberships/src/main/java/com/salesforce/apollo/membership/messaging/rbc/comms/ReliableBroadcast.java b/memberships/src/main/java/com/salesforce/apollo/membership/messaging/rbc/comms/ReliableBroadcast.java index 6896a8a4f2..2bb1a36664 100644 --- a/memberships/src/main/java/com/salesforce/apollo/membership/messaging/rbc/comms/ReliableBroadcast.java +++ b/memberships/src/main/java/com/salesforce/apollo/membership/messaging/rbc/comms/ReliableBroadcast.java @@ -9,16 +9,15 @@ import java.io.IOException; import com.google.common.util.concurrent.ListenableFuture; -import com.salesfoce.apollo.messaging.proto.MessageBff; -import com.salesfoce.apollo.messaging.proto.Reconcile; -import com.salesfoce.apollo.messaging.proto.ReconcileContext; +import com.salesforce.apollo.messaging.proto.MessageBff; +import com.salesforce.apollo.messaging.proto.Reconcile; +import com.salesforce.apollo.messaging.proto.ReconcileContext; import com.salesforce.apollo.archipelago.Link; import com.salesforce.apollo.membership.Member; import com.salesforce.apollo.membership.SigningMember; /** * @author hal.hildebrand - * */ public interface ReliableBroadcast extends Link { diff --git a/memberships/src/main/java/com/salesforce/apollo/membership/stereotomy/ControlledIdentifierMember.java b/memberships/src/main/java/com/salesforce/apollo/membership/stereotomy/ControlledIdentifierMember.java index 7768a7ad58..5ff68b7b18 100644 --- a/memberships/src/main/java/com/salesforce/apollo/membership/stereotomy/ControlledIdentifierMember.java +++ b/memberships/src/main/java/com/salesforce/apollo/membership/stereotomy/ControlledIdentifierMember.java @@ -6,7 +6,7 @@ */ package com.salesforce.apollo.membership.stereotomy; -import com.salesfoce.apollo.stereotomy.event.proto.KERL_; +import com.salesforce.apollo.stereotomy.event.proto.KERL_; import com.salesforce.apollo.cryptography.*; import com.salesforce.apollo.cryptography.cert.CertificateWithPrivateKey; import com.salesforce.apollo.membership.Member; diff --git a/memberships/src/main/java/com/salesforce/apollo/ring/RingCommunications.java b/memberships/src/main/java/com/salesforce/apollo/ring/RingCommunications.java index 4047d31268..c3d2649151 100644 --- a/memberships/src/main/java/com/salesforce/apollo/ring/RingCommunications.java +++ b/memberships/src/main/java/com/salesforce/apollo/ring/RingCommunications.java @@ -32,17 +32,17 @@ * @author hal.hildebrand */ public class RingCommunications { - private final static Logger log = LoggerFactory.getLogger( - RingCommunications.class); - final Context context; - final SigningMember member; - private final CommonCommunications comm; - private final Direction direction; - private final boolean ignoreSelf; - private final Lock lock = new ReentrantLock(); - private final List> traversalOrder = new ArrayList<>(); - protected boolean noDuplicates = false; - volatile int currentIndex = -1; + private final static Logger log = LoggerFactory.getLogger(RingCommunications.class); + + final Context context; + final SigningMember member; + private final CommonCommunications comm; + private final Direction direction; + private final boolean ignoreSelf; + private final Lock lock = new ReentrantLock(); + private final List> traversalOrder = new ArrayList<>(); + 
protected boolean noDuplicates = false; + volatile int currentIndex = -1; public RingCommunications(Context context, SigningMember member, CommonCommunications comm) { this(context, member, comm, false); @@ -94,10 +94,6 @@ public String toString() { return "RingCommunications [" + context.getId() + ":" + member.getId() + ":" + currentIndex + "]"; } - protected Logger getLog() { - return log; - } - @SuppressWarnings("unchecked") List> calculateTraversal(Digest digest) { var traversal = new ArrayList>(); @@ -145,6 +141,10 @@ final RingCommunications.Destination next(Digest digest) { } } + protected Logger getLog() { + return log; + } + private void execute(BiFunction round, SyncHandler handler, Destination destination) { if (destination.link == null) { @@ -154,7 +154,7 @@ private void execute(BiFunction round, SyncHandler void iterate(Digest digest, Runnable onMajority, BiFunction(); Thread.ofVirtual() - .factory() - .newThread( - () -> internalIterate(digest, onMajority, round, failedMajority, handler, onComplete, tally, traversed)) - .start(); + .start( + () -> internalIterate(digest, onMajority, round, failedMajority, handler, onComplete, tally, traversed)); } @@ -93,11 +91,6 @@ public RingIterator noDuplicates() { return this; } - @Override - protected Logger getLog() { - return log; - } - private void internalIterate(Digest digest, Runnable onMajority, BiFunction round, Runnable failedMajority, ResultConsumer handler, Consumer onComplete, AtomicInteger tally, Set traversed) { @@ -114,47 +107,48 @@ private void internalIterate(Digest digest, Runnable onMajority, BiFunction< } var next = next(digest); - log.trace("Iteration: {} tally: {} for: {} on: {} ring: {} complete: false on: {}", iteration(), tally.get(), - digest, context.getId(), next.ring(), member.getId()); + log.trace("Iteration: {} tally: {} for digest: {} on: {} ring: {} complete: false on: {}", iteration(), + tally.get(), digest, context.getId(), next.ring(), member.getId()); if (next.link() == null) { - log.trace("No successor found of: {} on: {} iteration: {} traversed: {} ring: {} on: {}", digest, + log.trace("No successor found for digest: {} on: {} iteration: {} traversed: {} ring: {} on: {}", digest, context.getId(), iteration(), traversed, context.ring(currentIndex).stream().toList(), member.getId()); final boolean allow = handler.handle(tally, Optional.empty(), next); allowed.accept(allow); if (allow) { - log.trace("Finished on iteration: {} proceeding on: {} for: {} tally: {} on: {}", iteration(), digest, - context.getId(), tally.get(), member.getId()); + log.trace("Finished on iteration: {} proceeding on: {} for digest: {} tally: {} on: {}", iteration(), + digest, context.getId(), tally.get(), member.getId()); schedule(proceed); } else { - log.trace("Completed on iteration: {} on: {} for: {} for: {} tally: {} on: {}", iteration(), digest, - context.getId(), tally.get(), member.getId()); + log.trace("Completed on iteration: {} on: {} for digest: {} for: {} tally: {} on: {}", iteration(), + digest, context.getId(), tally.get(), member.getId()); } return; } try (Comm link = next.link()) { - log.trace("Continuation on iteration: {} tally: {} for: {} on: {} ring: {} to: {} on: {}", iteration(), - tally.get(), digest, context.getId(), next.ring(), + log.trace("Continuation on iteration: {} tally: {} for digest: {} on: {} ring: {} to: {} on: {}", + iteration(), tally.get(), digest, context.getId(), next.ring(), link.getMember() == null ? 
null : link.getMember().getId(), member.getId()); Q result = null; try { result = round.apply(link, next.ring()); } catch (Throwable e) { - log.trace("Exception in round for: {} on: {} iteration: {} from: {} on: {}", digest, context.getId(), - iteration(), link.getMember() == null ? null : link.getMember().getId(), member.getId()); + log.trace("Exception in round for digest: {} context: {} iteration: {} from: {} on: {}", digest, + context.getId(), iteration(), link.getMember() == null ? null : link.getMember().getId(), + member.getId(), e); } if (result == null) { - log.trace("No asynchronous response for: {} on: {} iteration: {} from: {} on: {}", digest, + log.trace("No asynchronous response for digest: {} on: {} iteration: {} from: {} on: {}", digest, context.getId(), iteration(), link.getMember() == null ? null : link.getMember().getId(), member.getId()); final boolean allow = handler.handle(tally, Optional.empty(), next); allowed.accept(allow); if (allow) { - log.trace("Proceeding on iteration: {} on: {} for: {} tally: {} on: {}", iteration(), digest, + log.trace("Proceeding on iteration: {} on: {} for digest: {} tally: {} on: {}", iteration(), digest, context.getId(), tally.get(), member.getId()); schedule(proceed); } else { - log.trace("Completed on iteration: {} on: {} for: {} tally: {} on: {}", iteration(), digest, + log.trace("Completed on iteration: {} on: {} for digest: {} tally: {} on: {}", iteration(), digest, context.getId(), tally.get(), member.getId()); } return; @@ -162,11 +156,11 @@ private void internalIterate(Digest digest, Runnable onMajority, BiFunction< final var allow = handler.handle(tally, Optional.of(result), next); allowed.accept(allow); if (allow) { - log.trace("Scheduling next iteration: {} on: {} for: {} tally: {} on: {}", iteration(), digest, + log.trace("Scheduling next iteration: {} on: {} for digest: {} tally: {} on: {}", iteration(), digest, context.getId(), tally.get(), member.getId()); schedule(proceed); } else { - log.trace("Finished on iteration: {} on: {} for: {} tally: {} on: {}", iteration(), digest, + log.trace("Finished on iteration: {} on: {} for digest: {} tally: {} on: {}", iteration(), digest, context.getId(), tally.get(), member.getId()); } } catch (IOException e) { @@ -179,16 +173,16 @@ private void proceed(Digest key, final boolean allow, Runnable onMajority, Runna final var current = currentIndex; if (!finalIteration) { log.trace( - "Determining: {} continuation of: {} for: {} tally: {} majority: {} final itr: {} allow: {} on: {}", + "Determining: {} continuation of: {} for digest: {} tally: {} majority: {} final itr: {} allow: {} on: {}", current, key, context.getId(), tally.get(), context.majority(), finalIteration, allow, member.getId()); } if (finalIteration && allow) { - log.trace("Completing iteration: {} of: {} for: {} tally: {} on: {}", iteration(), key, context.getId(), - tally.get(), member.getId()); + log.trace("Completing iteration: {} of: {} for digest: {} tally: {} on: {}", iteration(), key, + context.getId(), tally.get(), member.getId()); if (failedMajority != null && !majorityFailed) { if (tally.get() < context.majority()) { majorityFailed = true; - log.debug("Failed to obtain majority of: {} for: {} tally: {} required: {} on: {}", key, + log.debug("Failed to obtain majority of: {} for digest: {} tally: {} required: {} on: {}", key, context.getId(), tally.get(), context.majority(), member.getId()); failedMajority.run(); } @@ -197,13 +191,14 @@ private void proceed(Digest key, final boolean allow, Runnable onMajority, 
Runna onComplete.accept(tally.get()); } } else if (!allow) { - log.trace("Termination of: {} for: {} tally: {} on: {}", key, context.getId(), tally.get(), member.getId()); + log.trace("Termination of: {} for digest: {} tally: {} on: {}", key, context.getId(), tally.get(), + member.getId()); } else { if (onMajority != null && !majoritySucceed) { if (tally.get() >= context.majority()) { majoritySucceed = true; - log.debug("Obtained: {} majority of: {} for: {} tally: {} on: {}", current, key, context.getId(), - tally.get(), member.getId()); + log.debug("Obtained: {} majority of: {} for digest: {} tally: {} on: {}", current, key, + context.getId(), tally.get(), member.getId()); onMajority.run(); } } @@ -211,6 +206,7 @@ private void proceed(Digest key, final boolean allow, Runnable onMajority, Runna } private void schedule(Runnable proceed) { - scheduler.schedule(Utils.wrapped(proceed, log), frequency.toNanos(), TimeUnit.NANOSECONDS); + scheduler.schedule(() -> Thread.ofVirtual().start(Utils.wrapped(proceed, log)), frequency.toNanos(), + TimeUnit.NANOSECONDS); } } diff --git a/memberships/src/main/java/com/salesforce/apollo/ring/SliceIterator.java b/memberships/src/main/java/com/salesforce/apollo/ring/SliceIterator.java index a6a7ce0e08..9d3d987492 100644 --- a/memberships/src/main/java/com/salesforce/apollo/ring/SliceIterator.java +++ b/memberships/src/main/java/com/salesforce/apollo/ring/SliceIterator.java @@ -47,15 +47,14 @@ public SliceIterator(String label, SigningMember member, List this.comm = comm; Entropy.secureShuffle(slice); this.currentIteration = slice.iterator(); - log.debug("Slice: {}", slice.stream().map(m -> m.getId()).toList()); + log.debug("Slice for: <{}> is: {} on: {}", label, slice.stream().map(m -> m.getId()).toList(), member.getId()); } public void iterate(BiFunction round, SlicePredicateHandler handler, Runnable onComplete, ScheduledExecutorService scheduler, Duration frequency) { + log.trace("Starting iteration of: <{}> on: {}", label, member.getId()); Thread.ofVirtual() - .factory() - .newThread(Utils.wrapped(() -> internalIterate(round, handler, onComplete, scheduler, frequency), log)) - .start(); + .start(Utils.wrapped(() -> internalIterate(round, handler, onComplete, scheduler, frequency), log)); } public void iterate(BiFunction round, SlicePredicateHandler handler, @@ -70,16 +69,20 @@ private void internalIterate(BiFunction round, SlicePredica Consumer allowed = allow -> proceed(allow, proceed, onComplete, scheduler, frequency); try (Comm link = next()) { if (link == null) { + log.trace("No link for iteration of: <{}> on: {}", label, member.getId()); allowed.accept(handler.handle(Optional.empty(), link, slice.get(slice.size() - 1))); return; } - log.trace("Iteration on: {} index: {} to: {} on: {}", label, current.getId(), link.getMember(), - member.getId()); + log.trace("Iteration of: <{}> to: {} on: {}", label, link.getMember().getId(), member.getId()); T result = null; try { result = round.apply(link, link.getMember()); } catch (StatusRuntimeException e) { - log.trace("Error applying round", e); + log.trace("Error: {} applying: <{}> slice to: {} on: {}", e, label, link.getMember().getId(), + member.getId()); + } catch (Throwable e) { + log.error("Unhandled: {} applying: <{}> slice to: {} on: {}", e, label, link.getMember().getId(), + member.getId()); } allowed.accept(handler.handle(Optional.ofNullable(result), link, link.getMember())); } catch (IOException e) { @@ -91,8 +94,8 @@ private Comm linkFor(Member m) { try { return comm.connect(m); } catch (Throwable e) 
{ - log.error("error opening connection to {}: {}", m.getId(), - (e.getCause() != null ? e.getCause() : e).getMessage()); + log.error("error opening connection of: <{}> to {}: {} on: {}", label, m.getId(), + (e.getCause() != null ? e.getCause() : e).getMessage(), member.getId()); } return null; } @@ -108,19 +111,20 @@ private Comm next() { private void proceed(final boolean allow, Runnable proceed, Runnable onComplete, ScheduledExecutorService scheduler, Duration frequency) { - log.trace("Determining continuation for: {} final itr: {} allow: {} on: {}", label, !currentIteration.hasNext(), - allow, member.getId()); + log.trace("Determining continuation for: <{}> final itr: {} allow: {} on: {}", label, + !currentIteration.hasNext(), allow, member.getId()); if (!currentIteration.hasNext() && allow) { - log.trace("Final iteration of: {} on: {}", label, member.getId()); + log.trace("Final iteration of: <{}> on: {}", label, member.getId()); if (onComplete != null) { log.trace("Completing iteration for: {} on: {}", label, member.getId()); onComplete.run(); } } else if (allow) { - log.trace("Proceeding for: {} on: {}", label, member.getId()); - scheduler.schedule(Utils.wrapped(proceed, log), frequency.toNanos(), TimeUnit.NANOSECONDS); + log.trace("Proceeding for: <{}> on: {}", label, member.getId()); + scheduler.schedule(() -> Thread.ofVirtual().start(Utils.wrapped(proceed, log)), frequency.toNanos(), + TimeUnit.NANOSECONDS); } else { - log.trace("Termination for: {} on: {}", label, member.getId()); + log.trace("Termination for: <{}> on: {}", label, member.getId()); } } diff --git a/memberships/src/test/java/com/salesforce/apollo/archipeligo/DemultiplexerTest.java b/memberships/src/test/java/com/salesforce/apollo/archipeligo/DemultiplexerTest.java index 155f23b96d..3d061fad39 100644 --- a/memberships/src/test/java/com/salesforce/apollo/archipeligo/DemultiplexerTest.java +++ b/memberships/src/test/java/com/salesforce/apollo/archipeligo/DemultiplexerTest.java @@ -9,14 +9,14 @@ import com.google.common.primitives.Ints; import com.google.protobuf.Any; import com.google.protobuf.ByteString; -import com.salesfoce.apollo.test.proto.ByteMessage; -import com.salesfoce.apollo.test.proto.PeerCreds; -import com.salesfoce.apollo.test.proto.TestItGrpc; -import com.salesfoce.apollo.test.proto.TestItGrpc.TestItImplBase; import com.salesforce.apollo.archipelago.Demultiplexer; import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.comm.grpc.DomainSocketServerInterceptor; import com.salesforce.apollo.cryptography.DigestAlgorithm; +import com.salesforce.apollo.test.proto.ByteMessage; +import com.salesforce.apollo.test.proto.PeerCreds; +import com.salesforce.apollo.test.proto.TestItGrpc; +import com.salesforce.apollo.test.proto.TestItGrpc.TestItImplBase; import io.grpc.ManagedChannel; import io.grpc.Server; import io.grpc.Status; @@ -56,13 +56,14 @@ */ public class DemultiplexerTest { - private static final Class channelType = IMPL.getChannelType(); - private static final Executor executor = Executors.newVirtualThreadPerTaskExecutor(); - private final EventLoopGroup eventLoopGroup = IMPL.getEventLoopGroup(); - private final List opened = new ArrayList<>(); - private Server serverA; - private Server serverB; - private Demultiplexer terminus; + private static final Class channelType = IMPL.getChannelType(); + private static final Executor executor = Executors.newVirtualThreadPerTaskExecutor(); + + private final EventLoopGroup eventLoopGroup = IMPL.getEventLoopGroup(); + private final List opened = 
new ArrayList<>(); + private Server serverA; + private Server serverB; + private Demultiplexer terminus; @AfterEach public void after() throws InterruptedException { diff --git a/memberships/src/test/java/com/salesforce/apollo/archipeligo/EnclaveTest.java b/memberships/src/test/java/com/salesforce/apollo/archipeligo/EnclaveTest.java index c2b935cf5e..6aa17a496d 100644 --- a/memberships/src/test/java/com/salesforce/apollo/archipeligo/EnclaveTest.java +++ b/memberships/src/test/java/com/salesforce/apollo/archipeligo/EnclaveTest.java @@ -8,16 +8,16 @@ import com.google.protobuf.Any; import com.google.protobuf.ByteString; -import com.salesfoce.apollo.test.proto.ByteMessage; -import com.salesfoce.apollo.test.proto.TestItGrpc; -import com.salesfoce.apollo.test.proto.TestItGrpc.TestItBlockingStub; -import com.salesfoce.apollo.test.proto.TestItGrpc.TestItImplBase; import com.salesforce.apollo.archipelago.*; import com.salesforce.apollo.archipelago.RouterImpl.CommonCommunications; import com.salesforce.apollo.comm.grpc.DomainSocketServerInterceptor; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.membership.Member; import com.salesforce.apollo.membership.impl.SigningMemberImpl; +import com.salesforce.apollo.test.proto.ByteMessage; +import com.salesforce.apollo.test.proto.TestItGrpc; +import com.salesforce.apollo.test.proto.TestItGrpc.TestItBlockingStub; +import com.salesforce.apollo.test.proto.TestItGrpc.TestItImplBase; import com.salesforce.apollo.utils.Utils; import io.grpc.ManagedChannel; import io.grpc.netty.DomainSocketNegotiatorHandler.DomainSocketNegotiator; @@ -52,7 +52,8 @@ public class EnclaveTest { private final static Class channelType = IMPL.getChannelType(); private static final Executor executor = Executors.newVirtualThreadPerTaskExecutor(); - private final TestItService local = new TestItService() { + + private final TestItService local = new TestItService() { @Override public void close() throws IOException { @@ -68,7 +69,7 @@ public Any ping(Any request) { return null; } }; - private EventLoopGroup eventLoopGroup; + private EventLoopGroup eventLoopGroup; @AfterEach public void after() throws Exception { diff --git a/memberships/src/test/java/com/salesforce/apollo/archipeligo/LocalServerTest.java b/memberships/src/test/java/com/salesforce/apollo/archipeligo/LocalServerTest.java index c824664349..dfa0388b3a 100644 --- a/memberships/src/test/java/com/salesforce/apollo/archipeligo/LocalServerTest.java +++ b/memberships/src/test/java/com/salesforce/apollo/archipeligo/LocalServerTest.java @@ -8,10 +8,10 @@ import com.google.protobuf.Any; import com.google.protobuf.ByteString; -import com.salesfoce.apollo.test.proto.ByteMessage; -import com.salesfoce.apollo.test.proto.TestItGrpc; -import com.salesfoce.apollo.test.proto.TestItGrpc.TestItBlockingStub; -import com.salesfoce.apollo.test.proto.TestItGrpc.TestItImplBase; +import com.salesforce.apollo.test.proto.ByteMessage; +import com.salesforce.apollo.test.proto.TestItGrpc; +import com.salesforce.apollo.test.proto.TestItGrpc.TestItBlockingStub; +import com.salesforce.apollo.test.proto.TestItGrpc.TestItImplBase; import com.salesforce.apollo.archipelago.*; import com.salesforce.apollo.archipelago.RouterImpl.CommonCommunications; import com.salesforce.apollo.cryptography.DigestAlgorithm; diff --git a/memberships/src/test/java/com/salesforce/apollo/archipeligo/RouterTest.java b/memberships/src/test/java/com/salesforce/apollo/archipeligo/RouterTest.java index 34108b5201..da39e7b1db 100644 --- 
a/memberships/src/test/java/com/salesforce/apollo/archipeligo/RouterTest.java +++ b/memberships/src/test/java/com/salesforce/apollo/archipeligo/RouterTest.java @@ -8,10 +8,10 @@ import com.google.protobuf.Any; import com.google.protobuf.ByteString; -import com.salesfoce.apollo.test.proto.ByteMessage; -import com.salesfoce.apollo.test.proto.TestItGrpc; -import com.salesfoce.apollo.test.proto.TestItGrpc.TestItBlockingStub; -import com.salesfoce.apollo.test.proto.TestItGrpc.TestItImplBase; +import com.salesforce.apollo.test.proto.ByteMessage; +import com.salesforce.apollo.test.proto.TestItGrpc; +import com.salesforce.apollo.test.proto.TestItGrpc.TestItBlockingStub; +import com.salesforce.apollo.test.proto.TestItGrpc.TestItImplBase; import com.salesforce.apollo.archipelago.*; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.membership.Member; diff --git a/memberships/src/test/java/com/salesforce/apollo/membership/messaging/rbc/RbcTest.java b/memberships/src/test/java/com/salesforce/apollo/membership/messaging/rbc/RbcTest.java index a88155c451..5efd0ecc99 100644 --- a/memberships/src/test/java/com/salesforce/apollo/membership/messaging/rbc/RbcTest.java +++ b/memberships/src/test/java/com/salesforce/apollo/membership/messaging/rbc/RbcTest.java @@ -10,7 +10,7 @@ import com.codahale.metrics.MetricRegistry; import com.google.protobuf.ByteString; import com.google.protobuf.InvalidProtocolBufferException; -import com.salesfoce.apollo.test.proto.ByteMessage; +import com.salesforce.apollo.test.proto.ByteMessage; import com.salesforce.apollo.archipelago.LocalServer; import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.archipelago.ServerConnectionCache; diff --git a/memberships/src/test/java/com/salesforce/apollo/ring/ServerImpl.java b/memberships/src/test/java/com/salesforce/apollo/ring/ServerImpl.java index d34399a049..ceccb94d76 100644 --- a/memberships/src/test/java/com/salesforce/apollo/ring/ServerImpl.java +++ b/memberships/src/test/java/com/salesforce/apollo/ring/ServerImpl.java @@ -1,7 +1,7 @@ package com.salesforce.apollo.ring; import com.google.protobuf.Any; -import com.salesfoce.apollo.test.proto.TestItGrpc; +import com.salesforce.apollo.test.proto.TestItGrpc; import com.salesforce.apollo.archipelago.RoutableService; import io.grpc.stub.StreamObserver; @@ -16,4 +16,4 @@ public ServerImpl(RoutableService router) { public void ping(Any request, StreamObserver responseObserver) { router.evaluate(responseObserver, t -> t.ping(request, responseObserver)); } -} \ No newline at end of file +} diff --git a/memberships/src/test/java/com/salesforce/apollo/ring/ServiceImpl.java b/memberships/src/test/java/com/salesforce/apollo/ring/ServiceImpl.java index d168130394..2602f33186 100644 --- a/memberships/src/test/java/com/salesforce/apollo/ring/ServiceImpl.java +++ b/memberships/src/test/java/com/salesforce/apollo/ring/ServiceImpl.java @@ -2,17 +2,17 @@ import com.google.protobuf.Any; import com.google.protobuf.ByteString; -import com.salesfoce.apollo.test.proto.ByteMessage; +import com.salesforce.apollo.test.proto.ByteMessage; import io.grpc.stub.StreamObserver; /** * @author hal.hildebrand **/ -public class ServiceImpl implements TestIt { - private final TestItService local; - private final String response; +public class ServiceImpl implements TestIt { + private final TestItService local; + private final String response; - public ServiceImpl( TestItService local, String response) { + public ServiceImpl(TestItService local, String response) { 
this.local = local; this.response = response; } @@ -20,7 +20,8 @@ public ServiceImpl( TestItService local, String response) { @Override public void ping(Any request, StreamObserver responseObserver) { local.ping(request); - responseObserver.onNext(Any.pack(ByteMessage.newBuilder().setContents(ByteString.copyFromUtf8(response)).build())); + responseObserver.onNext( + Any.pack(ByteMessage.newBuilder().setContents(ByteString.copyFromUtf8(response)).build())); responseObserver.onCompleted(); } } diff --git a/memberships/src/test/java/com/salesforce/apollo/ring/TestItClient.java b/memberships/src/test/java/com/salesforce/apollo/ring/TestItClient.java index 157e393690..916ef65c75 100644 --- a/memberships/src/test/java/com/salesforce/apollo/ring/TestItClient.java +++ b/memberships/src/test/java/com/salesforce/apollo/ring/TestItClient.java @@ -1,7 +1,7 @@ package com.salesforce.apollo.ring; import com.google.protobuf.Any; -import com.salesfoce.apollo.test.proto.TestItGrpc; +import com.salesforce.apollo.test.proto.TestItGrpc; import com.salesforce.apollo.archipelago.ManagedServerChannel; import com.salesforce.apollo.membership.Member; @@ -10,9 +10,9 @@ /** * @author hal.hildebrand **/ -public class TestItClient implements TestItService { +public class TestItClient implements TestItService { private final TestItGrpc.TestItBlockingStub client; - private final ManagedServerChannel connection; + private final ManagedServerChannel connection; public TestItClient(ManagedServerChannel c) { this.connection = c; diff --git a/model/pom.xml b/model/pom.xml index 8ca8bbc730..3b9acd54ac 100644 --- a/model/pom.xml +++ b/model/pom.xml @@ -8,7 +8,7 @@ model Model - Sytem Domain model for applications, deployments, tenants, etc. + System Domain model for applications, deployments, tenants, etc. 
com.salesforce.apollo @@ -40,7 +40,9 @@ org.scijava native-lib-loader + + org.hamcrest hamcrest @@ -74,7 +76,6 @@ - org.apache.maven.plugins maven-antrun-plugin @@ -279,4 +280,42 @@ + + + + + mac-domain + + + mac + + + + + io.netty + netty-transport-native-kqueue + ${netty.version} + ${os.detected.classifier} + test + + + + + linux-domain + + + linux + + + + + io.netty + netty-transport-native-epoll + ${netty.version} + ${os.detected.classifier} + test + + + + diff --git a/model/src/main/java/com/salesforce/apollo/model/Domain.java b/model/src/main/java/com/salesforce/apollo/model/Domain.java index 622c6962e0..87112adb73 100644 --- a/model/src/main/java/com/salesforce/apollo/model/Domain.java +++ b/model/src/main/java/com/salesforce/apollo/model/Domain.java @@ -7,39 +7,33 @@ package com.salesforce.apollo.model; import com.google.protobuf.Message; -import com.salesfoce.apollo.choam.proto.Join; -import com.salesfoce.apollo.choam.proto.Transaction; -import com.salesfoce.apollo.state.proto.Migration; -import com.salesfoce.apollo.state.proto.Txn; -import com.salesfoce.apollo.stereotomy.event.proto.Attachment; -import com.salesfoce.apollo.stereotomy.event.proto.AttachmentEvent; -import com.salesfoce.apollo.stereotomy.event.proto.KERL_; -import com.salesfoce.apollo.stereotomy.event.proto.KeyEventWithAttachments; import com.salesforce.apollo.choam.CHOAM; import com.salesforce.apollo.choam.Parameters; import com.salesforce.apollo.choam.Parameters.RuntimeParameters; -import com.salesforce.apollo.cryptography.Digest; +import com.salesforce.apollo.choam.proto.Join; +import com.salesforce.apollo.choam.proto.Transaction; import com.salesforce.apollo.cryptography.DigestAlgorithm; +import com.salesforce.apollo.cryptography.SignatureAlgorithm; import com.salesforce.apollo.cryptography.Signer; import com.salesforce.apollo.delphinius.Oracle; import com.salesforce.apollo.membership.Context; import com.salesforce.apollo.membership.Member; import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; import com.salesforce.apollo.model.delphinius.ShardedOracle; -import com.salesforce.apollo.model.stereotomy.ShardedKERL; import com.salesforce.apollo.state.Mutator; import com.salesforce.apollo.state.SqlStateMachine; +import com.salesforce.apollo.state.proto.Migration; +import com.salesforce.apollo.state.proto.Txn; import com.salesforce.apollo.stereotomy.ControlledIdentifier; -import com.salesforce.apollo.stereotomy.KERL; import com.salesforce.apollo.stereotomy.KERL.EventWithAttachments; +import com.salesforce.apollo.stereotomy.event.proto.Attachment; +import com.salesforce.apollo.stereotomy.event.proto.AttachmentEvent; +import com.salesforce.apollo.stereotomy.event.proto.KERL_; +import com.salesforce.apollo.stereotomy.event.proto.KeyEventWithAttachments; import com.salesforce.apollo.stereotomy.event.protobuf.InteractionEventImpl; import com.salesforce.apollo.stereotomy.event.protobuf.ProtobufEventFactory; import com.salesforce.apollo.stereotomy.identifier.Identifier; import com.salesforce.apollo.stereotomy.identifier.SelfAddressingIdentifier; -import com.salesforce.apollo.stereotomy.services.proto.ProtoKERLAdapter; -import org.jooq.DSLContext; -import org.jooq.SQLDialect; -import org.jooq.impl.DSL; import org.joou.ULong; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -57,8 +51,6 @@ import java.util.concurrent.Executors; import static com.salesforce.apollo.cryptography.QualifiedBase64.qb64; -import static com.salesforce.apollo.model.schema.tables.Member.MEMBER; -import static 
com.salesforce.apollo.stereotomy.schema.tables.Identifier.IDENTIFIER; import static java.nio.file.Path.of; /** @@ -68,16 +60,16 @@ * @author hal.hildebrand */ abstract public class Domain { - protected static final Executor executor = Executors.newVirtualThreadPerTaskExecutor(); - private static final Logger log = LoggerFactory.getLogger(Domain.class); - protected final CHOAM choam; - protected final KERL commonKERL; - protected final ControlledIdentifierMember member; - protected final Mutator mutator; - protected final Oracle oracle; - protected final Parameters params; - protected final SqlStateMachine sqlStateMachine; - protected final Connection stateConnection; + private static final Logger log = LoggerFactory.getLogger(Domain.class); + + protected final Executor executor = Executors.newVirtualThreadPerTaskExecutor(); + protected final CHOAM choam; + protected final ControlledIdentifierMember member; + protected final Mutator mutator; + protected final Oracle oracle; + protected final Parameters params; + protected final SqlStateMachine sqlStateMachine; + protected final Connection stateConnection; public Domain(ControlledIdentifierMember member, Parameters.Builder params, String dbURL, Path checkpointBaseDir, RuntimeParameters.Builder runtime) { @@ -101,33 +93,17 @@ public Domain(ControlledIdentifierMember member, Parameters.Builder params, Stri .setProcessor(sqlStateMachine.getExecutor()) .setMember(member) .setRestorer(sqlStateMachine.getBootstrapper()) - .setKerl(() -> kerl()) - .setGenesisData(members -> genesisOf(members)) + .setKerl(this::kerl) + .setGenesisData(this::genesisOf) .build()); choam = new CHOAM(this.params); mutator = sqlStateMachine.getMutator(choam.getSession()); stateConnection = sqlStateMachine.newConnection(); this.oracle = new ShardedOracle(stateConnection, mutator, params.getSubmitTimeout()); - this.commonKERL = new ShardedKERL(stateConnection, mutator, params.getSubmitTimeout(), - params.getDigestAlgorithm()); log.info("Domain: {} member: {} db URL: {} checkpoint base dir: {}", this.params.context().getId(), member.getId(), dbURL, checkpointBaseDir); } - public static void addMembers(Connection connection, List members, String state) { - var context = DSL.using(connection, SQLDialect.H2); - for (var m : members) { - var id = context.insertInto(IDENTIFIER, IDENTIFIER.PREFIX) - .values(m) - .onDuplicateKeyIgnore() - .returning(IDENTIFIER.ID) - .fetchOne(); - if (id != null) { - context.insertInto(MEMBER).set(MEMBER.IDENTIFIER, id.value1()).onConflictDoNothing().execute(); - } - } - } - public static Txn boostrapMigration() { Map resources = new HashMap<>(); resources.put(of("/initialize.xml"), res("/initialize.xml")); @@ -145,15 +121,6 @@ public static Txn boostrapMigration() { .build(); } - public static boolean isMember(DSLContext context, SelfAddressingIdentifier id) { - final var idTable = com.salesforce.apollo.stereotomy.schema.tables.Identifier.IDENTIFIER; - return context.fetchExists(context.select(MEMBER.IDENTIFIER) - .from(MEMBER) - .join(idTable) - .on(idTable.ID.eq(MEMBER.IDENTIFIER)) - .and(idTable.PREFIX.eq(id.getDigest().getBytes()))); - } - public static Path tempDirOf(ControlledIdentifier id) { Path dir; try { @@ -169,20 +136,20 @@ private static URL res(String resource) { return Domain.class.getResource(resource); } - public boolean activate(Member m) { - if (!active()) { - return params.runtime() - .foundation() - .getFoundation() - .getMembershipList() - .stream() - .map(d -> Digest.from(d)) - .anyMatch(d -> m.getId().equals(d)); - } - 
final var context = DSL.using(stateConnection, SQLDialect.H2); - final var activeMember = isMember(context, new SelfAddressingIdentifier(m.getId())); - - return activeMember; + private static Transaction transactionOf(Message message, SignatureAlgorithm signatureAlgorithm, + DigestAlgorithm digestAlgorithm) { + ByteBuffer buff = ByteBuffer.allocate(4); + buff.putInt(0); + buff.flip(); + var signer = new Signer.MockSigner(signatureAlgorithm, ULong.MIN); + var digeste = digestAlgorithm.getOrigin().toDigeste(); + var sig = signer.sign(digeste.toByteString().asReadOnlyByteBuffer(), buff, + message.toByteString().asReadOnlyByteBuffer()); + return Transaction.newBuilder() + .setSource(digeste) + .setContent(message.toByteString()) + .setSignature(sig.toSig()) + .build(); } public boolean active() { @@ -207,13 +174,6 @@ public Identifier getIdentifier() { return member.getIdentifier().getIdentifier(); } - /** - * @return the adapter that provides raw Protobuf access to the underlying KERI resolution - */ - public ProtoKERLAdapter getKERLService() { - return new ProtoKERLAdapter(commonKERL); - } - public ControlledIdentifierMember getMember() { return member; } @@ -235,34 +195,37 @@ public String toString() { return getClass().getSimpleName() + "[" + getIdentifier() + "]"; } + protected Transaction migrations() { + return null; + } + + protected Transaction transactionOf(Message message) { + var signatureAlgorithm = params.viewSigAlgorithm(); + var digestAlgorithm = params.digestAlgorithm(); + return transactionOf(message, signatureAlgorithm, digestAlgorithm); + } + // Provide the list of transactions establishing the unified KERL of the group private List genesisOf(Map members) { - log.info("Genesis joins: {} on: {}", members.keySet().stream().map(m -> m.getId()).toList(), params.member()); - var sorted = new ArrayList(members.keySet()); + log.info("Genesis joins: {} on: {}", members.keySet().stream().map(Member::getId).toList(), params.member()); + var sorted = new ArrayList<>(members.keySet()); sorted.sort(Comparator.naturalOrder()); List transactions = new ArrayList<>(); // Schemas transactions.add(transactionOf(boostrapMigration())); + var migrations = migrations(); + if (migrations != null) { + // additional SQL migrations + transactions.add(migrations); + } sorted.stream() .map(e -> manifest(members.get(e))) .filter(Objects::nonNull) .flatMap(Collection::stream) .forEach(transactions::add); - transactions.add(initalMembership( - params.runtime().foundation().getFoundation().getMembershipList().stream().map(d -> Digest.from(d)).toList())); return transactions; } - private Transaction initalMembership(List digests) { - var call = mutator.call("{ call apollo_kernel.add_members(?, ?) 
}", digests.stream() - .map( - d -> new SelfAddressingIdentifier(d)) - .map( - id -> id.toIdent().toByteArray()) - .toList(), "active"); - return transactionOf(Txn.newBuilder().setCall(call).build()); - } - // Answer the KERL of this node private KERL_ kerl() { List kerl; @@ -271,7 +234,7 @@ private KERL_ kerl() { return KERL_.getDefaultInstance(); } var b = KERL_.newBuilder(); - kerl.stream().map(ewa -> ewa.toKeyEvente()).forEach(ke -> b.addEvents(ke)); + kerl.stream().map(EventWithAttachments::toKeyEvente).forEach(b::addEvents); return b.build(); } @@ -303,19 +266,4 @@ private Transaction transactionOf(KeyEventWithAttachments ke) { } return transactionOf(Txn.newBuilder().setBatched(batch.build()).build()); } - - private Transaction transactionOf(Message message) { - ByteBuffer buff = ByteBuffer.allocate(4); - buff.putInt(0); - buff.flip(); - var signer = new Signer.MockSigner(params.viewSigAlgorithm(), ULong.MIN); - var digeste = params.digestAlgorithm().getOrigin().toDigeste(); - var sig = signer.sign(digeste.toByteString().asReadOnlyByteBuffer(), buff, - message.toByteString().asReadOnlyByteBuffer()); - return Transaction.newBuilder() - .setSource(digeste) - .setContent(message.toByteString()) - .setSignature(sig.toSig()) - .build(); - } } diff --git a/model/src/main/java/com/salesforce/apollo/model/ProcessContainerDomain.java b/model/src/main/java/com/salesforce/apollo/model/ProcessContainerDomain.java new file mode 100644 index 0000000000..8c7882dad2 --- /dev/null +++ b/model/src/main/java/com/salesforce/apollo/model/ProcessContainerDomain.java @@ -0,0 +1,245 @@ +package com.salesforce.apollo.model; + +import com.salesforce.apollo.archipelago.Portal; +import com.salesforce.apollo.choam.Parameters; +import com.salesforce.apollo.comm.grpc.DomainSocketServerInterceptor; +import com.salesforce.apollo.cryptography.Digest; +import com.salesforce.apollo.cryptography.JohnHancock; +import com.salesforce.apollo.cryptography.Signer; +import com.salesforce.apollo.cryptography.proto.Digeste; +import com.salesforce.apollo.demesne.proto.DemesneParameters; +import com.salesforce.apollo.demesne.proto.SubContext; +import com.salesforce.apollo.fireflies.View; +import com.salesforce.apollo.membership.Member; +import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; +import com.salesforce.apollo.model.demesnes.Demesne; +import com.salesforce.apollo.model.demesnes.JniBridge; +import com.salesforce.apollo.model.demesnes.comm.DemesneKERLServer; +import com.salesforce.apollo.model.demesnes.comm.OuterContextServer; +import com.salesforce.apollo.model.demesnes.comm.OuterContextService; +import com.salesforce.apollo.stereotomy.event.Seal; +import com.salesforce.apollo.stereotomy.event.proto.AttachmentEvent; +import com.salesforce.apollo.stereotomy.event.proto.KeyState_; +import com.salesforce.apollo.stereotomy.identifier.BasicIdentifier; +import com.salesforce.apollo.stereotomy.identifier.SelfAddressingIdentifier; +import com.salesforce.apollo.stereotomy.identifier.spec.IdentifierSpecification; +import com.salesforce.apollo.stereotomy.identifier.spec.InteractionSpecification; +import com.salesforce.apollo.stereotomy.services.grpc.StereotomyMetrics; +import io.grpc.BindableService; +import io.grpc.ManagedChannel; +import io.grpc.Server; +import io.grpc.netty.DomainSocketNegotiatorHandler; +import io.grpc.netty.NettyChannelBuilder; +import io.grpc.netty.NettyServerBuilder; +import io.netty.channel.EventLoopGroup; +import io.netty.channel.unix.DomainSocketAddress; +import org.joou.ULong; 
+import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.net.InetSocketAddress; +import java.nio.file.Path; +import java.time.Duration; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.RejectedExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; + +import static com.salesforce.apollo.comm.grpc.DomainSocketServerInterceptor.IMPL; +import static com.salesforce.apollo.cryptography.QualifiedBase64.qb64; + +/** + * @author hal.hildebrand + **/ +public class ProcessContainerDomain extends ProcessDomain { + + private final static Logger log = LoggerFactory.getLogger( + ProcessContainerDomain.class); + private final static Class channelType = IMPL.getChannelType(); + + private final DomainSocketAddress bridge; + private final EventLoopGroup clientEventLoopGroup = IMPL.getEventLoopGroup(); + private final Path communicationsDirectory; + private final EventLoopGroup contextEventLoopGroup = IMPL.getEventLoopGroup(); + private final Map hostedDomains = new ConcurrentHashMap<>(); + private final DomainSocketAddress outerContextEndpoint; + private final Server outerContextService; + private final Portal portal; + private final DomainSocketAddress portalEndpoint; + private final EventLoopGroup portalEventLoopGroup = IMPL.getEventLoopGroup(); + private final Map routes = new HashMap<>(); + private final IdentifierSpecification.Builder subDomainSpecification; + + public ProcessContainerDomain(Digest group, ControlledIdentifierMember member, ProcessDomainParameters parameters, + Parameters.Builder builder, Parameters.RuntimeParameters.Builder runtime, + InetSocketAddress endpoint, Path commDirectory, + com.salesforce.apollo.fireflies.Parameters.Builder ff, + IdentifierSpecification.Builder subDomainSpecification, + StereotomyMetrics stereotomyMetrics) { + super(group, member, parameters, builder, runtime, endpoint, ff, stereotomyMetrics); + communicationsDirectory = commDirectory; + bridge = new DomainSocketAddress(communicationsDirectory.resolve(UUID.randomUUID().toString()).toFile()); + portalEndpoint = new DomainSocketAddress( + communicationsDirectory.resolve(UUID.randomUUID().toString()).toFile()); + portal = new Portal<>(member.getId(), NettyServerBuilder.forAddress(portalEndpoint) + .protocolNegotiator( + new DomainSocketNegotiatorHandler.DomainSocketNegotiator( + IMPL)) + .channelType(IMPL.getServerDomainSocketChannelClass()) + .workerEventLoopGroup(portalEventLoopGroup) + .bossEventLoopGroup(portalEventLoopGroup) + .intercept(new DomainSocketServerInterceptor()), + s -> handler(portalEndpoint), bridge, Duration.ofMillis(1), s -> routes.get(s)); + outerContextEndpoint = new DomainSocketAddress( + communicationsDirectory.resolve(UUID.randomUUID().toString()).toFile()); + outerContextService = NettyServerBuilder.forAddress(outerContextEndpoint) + .protocolNegotiator( + new DomainSocketNegotiatorHandler.DomainSocketNegotiator(IMPL)) + .channelType(IMPL.getServerDomainSocketChannelClass()) + .addService(new DemesneKERLServer(dht, null)) + .addService(outerContextService()) + .workerEventLoopGroup(contextEventLoopGroup) + .bossEventLoopGroup(contextEventLoopGroup) + .build(); + this.subDomainSpecification = subDomainSpecification; + } + + public SelfAddressingIdentifier spawn(DemesneParameters.Builder prototype) { + final var witness = member.getIdentifier().newEphemeral().get(); + final var cloned = prototype.clone(); + var parameters = 
cloned.setCommDirectory(communicationsDirectory.toString()) + .setPortal(portalEndpoint.path()) + .setParent(outerContextEndpoint.path()) + .build(); + var ctxId = Digest.from(parameters.getContext()); + final AtomicBoolean added = new AtomicBoolean(); + final var demesne = new JniBridge(parameters); + var computed = hostedDomains.computeIfAbsent(ctxId, k -> { + added.set(true); + return demesne; + }); + if (added.get()) { + var newSpec = subDomainSpecification.clone(); + // the receiver is a witness to the subdomain's delegated key + var newWitnesses = new ArrayList<>(subDomainSpecification.getWitnesses()); + newWitnesses.add(new BasicIdentifier(witness.getPublic())); + newSpec.setWitnesses(newWitnesses); + var incp = demesne.inception(member.getIdentifier().getIdentifier().toIdent(), newSpec); + var sigs = new HashMap(); + sigs.put(0, new Signer.SignerImpl(witness.getPrivate(), ULong.MIN).sign(incp.toKeyEvent_().toByteString())); + var attached = new com.salesforce.apollo.stereotomy.event.AttachmentEvent.AttachmentImpl(sigs); + var seal = Seal.EventSeal.construct(incp.getIdentifier(), incp.hash(dht.digestAlgorithm()), + incp.getSequenceNumber().longValue()); + var builder = InteractionSpecification.newBuilder().addAllSeals(Collections.singletonList(seal)); + KeyState_ ks = dht.append(AttachmentEvent.newBuilder() + .setCoordinates(incp.getCoordinates().toEventCoords()) + .setAttachment(attached.toAttachemente()) + .build()); + var coords = member.getIdentifier().seal(builder); + demesne.commit(coords.toEventCoords()); + demesne.start(); + return (SelfAddressingIdentifier) incp.getIdentifier(); + } + return computed.getId(); + } + + @Override + protected View.ViewLifecycleListener listener() { + var delegate = super.listener(); + return (context, id, join, leaving) -> { + delegate.viewChange(context, id, join, leaving); + log.info("View change: {} for: {} joining: {} leaving: {} on: {}", id, params.context().getId(), + join.size(), leaving.size(), params.member().getId()); + }; + } + + @Override + protected void startServices() { + super.startServices(); + try { + portal.start(); + } catch (IOException e) { + throw new IllegalStateException( + "Unable to start portal, local address: " + bridge.path() + " on: " + params.member().getId()); + } + try { + outerContextService.start(); + } catch (IOException e) { + throw new IllegalStateException( + "Unable to start outer context service, local address: " + outerContextEndpoint.path() + " on: " + + params.member().getId()); + } + } + + @Override + protected void stopServices() { + super.stopServices(); + portal.close(Duration.ofSeconds(30)); + try { + outerContextService.shutdown(); + } catch (RejectedExecutionException e) { + // eat + } catch (Throwable t) { + log.error("Exception shutting down process domain: {}", member.getId(), t); + } + try { + outerContextService.awaitTermination(30, TimeUnit.SECONDS); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + hostedDomains.values().forEach(d -> d.stop()); + var portalELG = portalEventLoopGroup.shutdownGracefully(100, 1_000, TimeUnit.MILLISECONDS); + var serverELG = contextEventLoopGroup.shutdownGracefully(100, 1_000, TimeUnit.MILLISECONDS); + var clientELG = clientEventLoopGroup.shutdownGracefully(100, 1_000, TimeUnit.MILLISECONDS); + try { + if (clientELG.await(30, TimeUnit.SECONDS)) { + log.info("Did not completely shutdown client event loop group for process: {}", member.getId()); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + 
return; + } + try { + if (!serverELG.await(30, TimeUnit.SECONDS)) { + log.info("Did not completely shutdown server event loop group for process: {}", member.getId()); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + return; + } + try { + if (!portalELG.await(30, TimeUnit.SECONDS)) { + log.info("Did not completely shutdown portal event loop group for process: {}", member.getId()); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + } + + private ManagedChannel handler(DomainSocketAddress address) { + return NettyChannelBuilder.forAddress(address) + .executor(executor) + .eventLoopGroup(clientEventLoopGroup) + .channelType(channelType) + .keepAliveTime(1, TimeUnit.SECONDS) + .usePlaintext() + .build(); + } + + private BindableService outerContextService() { + return new OuterContextServer(new OuterContextService() { + + @Override + public void deregister(Digeste context) { + routes.remove(qb64(Digest.from(context))); + } + + @Override + public void register(SubContext context) { + // routes.put("",qb64(Digest.from(context))); + } + }, null); + } +} diff --git a/model/src/main/java/com/salesforce/apollo/model/ProcessDomain.java b/model/src/main/java/com/salesforce/apollo/model/ProcessDomain.java index a25cf98eb5..1f574cbc9e 100644 --- a/model/src/main/java/com/salesforce/apollo/model/ProcessDomain.java +++ b/model/src/main/java/com/salesforce/apollo/model/ProcessDomain.java @@ -6,135 +6,73 @@ */ package com.salesforce.apollo.model; -import com.salesfoce.apollo.cryptography.proto.Digeste; -import com.salesfoce.apollo.demesne.proto.DemesneParameters; -import com.salesfoce.apollo.demesne.proto.SubContext; -import com.salesfoce.apollo.stereotomy.event.proto.AttachmentEvent; -import com.salesfoce.apollo.stereotomy.event.proto.KeyState_; -import com.salesforce.apollo.archipelago.Portal; import com.salesforce.apollo.choam.Parameters; import com.salesforce.apollo.choam.Parameters.Builder; -import com.salesforce.apollo.comm.grpc.DomainSocketServerInterceptor; -import com.salesforce.apollo.cryptography.*; +import com.salesforce.apollo.cryptography.Digest; +import com.salesforce.apollo.cryptography.DigestAlgorithm; +import com.salesforce.apollo.cryptography.SignatureAlgorithm; import com.salesforce.apollo.cryptography.cert.CertificateWithPrivateKey; import com.salesforce.apollo.fireflies.View; import com.salesforce.apollo.fireflies.View.Participant; import com.salesforce.apollo.fireflies.View.ViewLifecycleListener; import com.salesforce.apollo.membership.Context; -import com.salesforce.apollo.membership.Member; import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; -import com.salesforce.apollo.model.demesnes.Demesne; -import com.salesforce.apollo.model.demesnes.JniBridge; -import com.salesforce.apollo.model.demesnes.comm.DemesneKERLServer; -import com.salesforce.apollo.model.demesnes.comm.OuterContextServer; -import com.salesforce.apollo.model.demesnes.comm.OuterContextService; -import com.salesforce.apollo.stereotomy.EventCoordinates; import com.salesforce.apollo.stereotomy.EventValidation; -import com.salesforce.apollo.stereotomy.event.Seal; -import com.salesforce.apollo.stereotomy.identifier.BasicIdentifier; +import com.salesforce.apollo.stereotomy.Verifiers; import com.salesforce.apollo.stereotomy.identifier.SelfAddressingIdentifier; -import com.salesforce.apollo.stereotomy.identifier.spec.IdentifierSpecification; -import com.salesforce.apollo.stereotomy.identifier.spec.InteractionSpecification; +import 
com.salesforce.apollo.stereotomy.services.grpc.StereotomyMetrics; import com.salesforce.apollo.thoth.KerlDHT; -import io.grpc.BindableService; -import io.grpc.ManagedChannel; -import io.grpc.Server; -import io.grpc.netty.DomainSocketNegotiatorHandler.DomainSocketNegotiator; -import io.grpc.netty.NettyChannelBuilder; -import io.grpc.netty.NettyServerBuilder; -import io.netty.channel.EventLoopGroup; -import io.netty.channel.unix.DomainSocketAddress; import org.h2.jdbcx.JdbcConnectionPool; -import org.joou.ULong; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; import java.net.InetSocketAddress; import java.nio.file.Path; import java.time.Duration; import java.time.Instant; -import java.util.*; -import java.util.concurrent.ConcurrentHashMap; +import java.util.UUID; import java.util.concurrent.RejectedExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; - -import static com.salesforce.apollo.comm.grpc.DomainSocketServerInterceptor.IMPL; -import static com.salesforce.apollo.cryptography.QualifiedBase64.qb64; /** * The logical domain of the current "Process" - OS and Simulation defined, 'natch. *

    * The ProcessDomain represents a member node in the top level domain and represents the top level container model for - * the distributed system. This top level domain contains every sub domain as decendents. The membership of this domain - * is the entirety of all process members in the system. The Context of this domain is also the foundational fireflies - * membership domain of the entire system. + * the distributed system. The Context of this domain is the foundational fireflies membership domain for the group id. * * @author hal.hildebrand */ public class ProcessDomain extends Domain { - private final static Class channelType = IMPL.getChannelType(); - - private final static Logger log = LoggerFactory.getLogger(ProcessDomain.class); - - private final DomainSocketAddress bridge; - private final EventLoopGroup clientEventLoopGroup = IMPL.getEventLoopGroup(); - private final Path communicationsDirectory; - private final EventLoopGroup contextEventLoopGroup = IMPL.getEventLoopGroup(); - private final KerlDHT dht; - private final View foundation; - private final Map hostedDomains = new ConcurrentHashMap<>(); - private final UUID listener; - private final DomainSocketAddress outerContextEndpoint; - private final Server outerContextService; - private final Portal portal; - private final DomainSocketAddress portalEndpoint; - private final EventLoopGroup portalEventLoopGroup = IMPL.getEventLoopGroup(); - private final Map routes = new HashMap<>(); - private final IdentifierSpecification.Builder subDomainSpecification; + private final static Logger log = LoggerFactory.getLogger(ProcessDomain.class); + protected final KerlDHT dht; + protected final View foundation; + private final UUID listener; - public ProcessDomain(Digest group, ControlledIdentifierMember member, Builder builder, String dbURL, - Path checkpointBaseDir, Parameters.RuntimeParameters.Builder runtime, - InetSocketAddress endpoint, Path commDirectory, - com.salesforce.apollo.fireflies.Parameters.Builder ff, EventValidation eventValidation, - IdentifierSpecification.Builder subDomainSpecification) { - super(member, builder, dbURL, checkpointBaseDir, runtime); - communicationsDirectory = commDirectory; + public ProcessDomain(Digest group, ControlledIdentifierMember member, ProcessDomainParameters parameters, + Builder builder, Parameters.RuntimeParameters.Builder runtime, InetSocketAddress endpoint, + com.salesforce.apollo.fireflies.Parameters.Builder ff, StereotomyMetrics stereotomyMetrics) { + super(member, builder, parameters.dbURL, parameters.checkpointBaseDir, runtime); var base = Context.newBuilder() + .setBias(parameters.dhtBias) + .setpByz(parameters.dhtPbyz) .setId(group) - .setCardinality(params.runtime().foundation().getFoundation().getMembershipCount()) .build(); - this.foundation = new View(base, getMember(), endpoint, eventValidation, params.communications(), ff.build(), - DigestAlgorithm.DEFAULT, null); - final var url = String.format("jdbc:h2:mem:%s-%s;DB_CLOSE_DELAY=-1", member.getId(), ""); - JdbcConnectionPool connectionPool = JdbcConnectionPool.create(url, "", ""); + final var dhtUrl = String.format("jdbc:h2:mem:%s-%s;DB_CLOSE_DELAY=-1", member.getId(), UUID.randomUUID()); + JdbcConnectionPool connectionPool = JdbcConnectionPool.create(dhtUrl, "", ""); connectionPool.setMaxConnections(10); - dht = new KerlDHT(Duration.ofMillis(10), foundation.getContext(), member, connectionPool, - params.digestAlgorithm(), params.communications(), Duration.ofSeconds(1), 0.00125, null); + dht = new 
KerlDHT(parameters.dhtOpsFrequency, params.context(), member, connectionPool, + params.digestAlgorithm(), params.communications(), parameters.dhtOperationsTimeout, + parameters.dhtFpr, stereotomyMetrics); + var mock = true; + var validation = mock ? EventValidation.NONE : dht.getAni().eventValidation(parameters.dhtEventValidTO); + var verifiers = mock ? Verifiers.NONE : dht.getVerifiers(); + this.foundation = new View(base, getMember(), endpoint, validation, verifiers, params.communications(), + ff.build(), DigestAlgorithm.DEFAULT, null); listener = foundation.register(listener()); - bridge = new DomainSocketAddress(communicationsDirectory.resolve(UUID.randomUUID().toString()).toFile()); - portalEndpoint = new DomainSocketAddress( - communicationsDirectory.resolve(UUID.randomUUID().toString()).toFile()); - portal = new Portal<>(member.getId(), NettyServerBuilder.forAddress(portalEndpoint) - .protocolNegotiator(new DomainSocketNegotiator(IMPL)) - .channelType(IMPL.getServerDomainSocketChannelClass()) - .workerEventLoopGroup(portalEventLoopGroup) - .bossEventLoopGroup(portalEventLoopGroup) - .intercept(new DomainSocketServerInterceptor()), - s -> handler(portalEndpoint), bridge, Duration.ofMillis(1), s -> routes.get(s)); - outerContextEndpoint = new DomainSocketAddress( - communicationsDirectory.resolve(UUID.randomUUID().toString()).toFile()); - outerContextService = NettyServerBuilder.forAddress(outerContextEndpoint) - .protocolNegotiator(new DomainSocketNegotiator(IMPL)) - .channelType(IMPL.getServerDomainSocketChannelClass()) - .addService(new DemesneKERLServer(dht, null)) - .addService(outerContextService()) - .workerEventLoopGroup(contextEventLoopGroup) - .bossEventLoopGroup(contextEventLoopGroup) - .build(); - this.subDomainSpecification = subDomainSpecification; + } + + public KerlDHT getDht() { + return dht; } public View getFoundation() { @@ -145,45 +83,6 @@ public CertificateWithPrivateKey provision(Duration duration, SignatureAlgorithm return member.getIdentifier().provision(Instant.now(), duration, signatureAlgorithm); } - public SelfAddressingIdentifier spawn(DemesneParameters.Builder prototype) { - final var witness = member.getIdentifier().newEphemeral().get(); - final var cloned = prototype.clone(); - var parameters = cloned.setCommDirectory(communicationsDirectory.toString()) - .setPortal(portalEndpoint.path()) - .setParent(outerContextEndpoint.path()) - .build(); - var ctxId = Digest.from(parameters.getContext()); - final AtomicBoolean added = new AtomicBoolean(); - final var demesne = new JniBridge(parameters); - var computed = hostedDomains.computeIfAbsent(ctxId, k -> { - added.set(true); - return demesne; - }); - if (added.get()) { - var newSpec = subDomainSpecification.clone(); - // the receiver is a witness to the sub domain's delegated key - var newWitnesses = new ArrayList<>(subDomainSpecification.getWitnesses()); - newWitnesses.add(new BasicIdentifier(witness.getPublic())); - newSpec.setWitnesses(newWitnesses); - var incp = demesne.inception(member.getIdentifier().getIdentifier().toIdent(), newSpec); - var sigs = new HashMap(); - sigs.put(0, new Signer.SignerImpl(witness.getPrivate(), ULong.MIN).sign(incp.toKeyEvent_().toByteString())); - var attached = new com.salesforce.apollo.stereotomy.event.AttachmentEvent.AttachmentImpl(sigs); - var seal = Seal.EventSeal.construct(incp.getIdentifier(), incp.hash(dht.digestAlgorithm()), - incp.getSequenceNumber().longValue()); - var builder = InteractionSpecification.newBuilder().addAllSeals(Collections.singletonList(seal)); - 
KeyState_ ks = dht.append(AttachmentEvent.newBuilder() - .setCoordinates(incp.getCoordinates().toEventCoords()) - .setAttachment(attached.toAttachemente()) - .build()); - var coords = member.getIdentifier().seal(builder); - demesne.commit(coords.toEventCoords()); - demesne.start(); - return (SelfAddressingIdentifier) incp.getIdentifier(); - } - return computed.getId(); - } - @Override public void start() { startServices(); @@ -193,128 +92,40 @@ public void start() { @Override public void stop() { super.stop(); - hostedDomains.values().forEach(d -> d.stop()); foundation.deregister(listener); try { stopServices(); } catch (RejectedExecutionException e) { } - var portalELG = portalEventLoopGroup.shutdownGracefully(100, 1_000, TimeUnit.MILLISECONDS); - var serverELG = contextEventLoopGroup.shutdownGracefully(100, 1_000, TimeUnit.MILLISECONDS); - var clientELG = clientEventLoopGroup.shutdownGracefully(100, 1_000, TimeUnit.MILLISECONDS); - try { - if (clientELG.await(30, TimeUnit.SECONDS)) { - log.info("Did not completely shutdown client event loop group for process: {}", member.getId()); - } - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - return; - } - try { - if (!serverELG.await(30, TimeUnit.SECONDS)) { - log.info("Did not completely shutdown server event loop group for process: {}", member.getId()); - } - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - return; - } - try { - if (!portalELG.await(30, TimeUnit.SECONDS)) { - log.info("Did not completely shutdown portal event loop group for process: {}", member.getId()); - } - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } } - private ManagedChannel handler(DomainSocketAddress address) { - return NettyChannelBuilder.forAddress(address) - .executor(executor) - .eventLoopGroup(clientEventLoopGroup) - .channelType(channelType) - .keepAliveTime(1, TimeUnit.SECONDS) - .usePlaintext() - .build(); - } - - private ViewLifecycleListener listener() { - return new ViewLifecycleListener() { - - @Override - public void update(EventCoordinates update) { - // TODO Auto-generated method stub - - } - - @Override - public void viewChange(Context context, Digest id, List join, - List leaving) { - for (var d : join) { - if (d.getIdentifier() instanceof SelfAddressingIdentifier sai) { - params.context().activate(context.getMember(sai.getDigest())); - } - } - for (var d : leaving) { - params.context().remove(d); + protected ViewLifecycleListener listener() { + return (context, id, join, leaving) -> { + for (var d : join) { + if (d.getIdentifier() instanceof SelfAddressingIdentifier sai) { + params.context().activate(context.getMember(sai.getDigest())); } - - hostedDomains.forEach((viewId, demesne) -> { - demesne.viewChange(viewId, join, leaving); - }); - - log.info("View change: {} for: {} joining: {} leaving: {} on: {}", id, params.context().getId(), - join.size(), leaving.size(), params.member().getId()); } - }; - } - - private BindableService outerContextService() { - return new OuterContextServer(new OuterContextService() { - - @Override - public void deregister(Digeste context) { - routes.remove(qb64(Digest.from(context))); + for (var d : leaving) { + params.context().remove(d); } - @Override - public void register(SubContext context) { - // routes.put("",qb64(Digest.from(context))); - } - }, null); + log.info("View change: {} for: {} joining: {} leaving: {} on: {}", id, params.context().getId(), + join.size(), leaving.size(), params.member().getId()); + }; } - private void 
startServices() { - dht.start(Duration.ofMillis(10)); // TODO parameterize gossip frequency - try { - portal.start(); - } catch (IOException e) { - throw new IllegalStateException( - "Unable to start portal, local address: " + bridge.path() + " on: " + params.member().getId()); - } - try { - outerContextService.start(); - } catch (IOException e) { - throw new IllegalStateException( - "Unable to start outer context service, local address: " + outerContextEndpoint.path() + " on: " - + params.member().getId()); - } + protected void startServices() { + dht.start(params.gossipDuration()); } - private void stopServices() { - portal.close(Duration.ofSeconds(30)); - try { - outerContextService.shutdown(); - } catch (RejectedExecutionException e) { - // eat - } catch (Throwable t) { - log.error("Exception shutting down process domain: {}", member.getId(), t); - } - try { - outerContextService.awaitTermination(30, TimeUnit.SECONDS); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - } + protected void stopServices() { dht.stop(); } + + public record ProcessDomainParameters(String dbURL, Duration dhtOperationsTimeout, Path checkpointBaseDir, + Duration dhtOpsFrequency, double dhtFpr, Duration dhtEventValidTO, + int dhtBias, int jdbcMaxConnections, double dhtPbyz) { + } } diff --git a/model/src/main/java/com/salesforce/apollo/model/SubDomain.java b/model/src/main/java/com/salesforce/apollo/model/SubDomain.java index 1d0573cee0..cb0c1aae69 100644 --- a/model/src/main/java/com/salesforce/apollo/model/SubDomain.java +++ b/model/src/main/java/com/salesforce/apollo/model/SubDomain.java @@ -7,15 +7,17 @@ package com.salesforce.apollo.model; import com.codahale.metrics.Timer; -import com.salesfoce.apollo.demesne.proto.DelegationUpdate; -import com.salesfoce.apollo.demesne.proto.SignedDelegate; -import com.salesfoce.apollo.cryptography.proto.Biff; -import com.salesfoce.apollo.cryptography.proto.Digeste; import com.salesforce.apollo.archipelago.Enclave.RoutingClientIdentity; import com.salesforce.apollo.archipelago.RouterImpl.CommonCommunications; +import com.salesforce.apollo.bloomFilters.BloomFilter; +import com.salesforce.apollo.bloomFilters.BloomFilter.DigestBloomFilter; import com.salesforce.apollo.choam.Parameters.Builder; import com.salesforce.apollo.choam.Parameters.RuntimeParameters; import com.salesforce.apollo.cryptography.Digest; +import com.salesforce.apollo.cryptography.proto.Biff; +import com.salesforce.apollo.cryptography.proto.Digeste; +import com.salesforce.apollo.demesne.proto.DelegationUpdate; +import com.salesforce.apollo.demesne.proto.SignedDelegate; import com.salesforce.apollo.membership.Member; import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; import com.salesforce.apollo.model.comms.Delegation; @@ -23,8 +25,7 @@ import com.salesforce.apollo.model.comms.DelegationService; import com.salesforce.apollo.ring.RingCommunications; import com.salesforce.apollo.utils.Entropy; -import com.salesforce.apollo.bloomFilters.BloomFilter; -import com.salesforce.apollo.bloomFilters.BloomFilter.DigestBloomFilter; +import com.salesforce.apollo.utils.Utils; import org.h2.mvstore.MVMap; import org.h2.mvstore.MVStore; import org.slf4j.Logger; @@ -36,6 +37,7 @@ import java.util.Map; import java.util.Optional; import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @@ -57,6 +59,9 @@ public class SubDomain extends Domain { private 
final RingCommunications ring; private final AtomicBoolean started = new AtomicBoolean(); private final MVStore store; + private final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1, + Thread.ofVirtual() + .factory()); public SubDomain(ControlledIdentifierMember member, Builder params, Path checkpointBaseDir, RuntimeParameters.Builder runtime, int maxTransfer, Duration gossipInterval, double fpr) { @@ -102,8 +107,8 @@ public void start() { super.start(); Duration initialDelay = gossipInterval.plusMillis(Entropy.nextBitsStreamLong(gossipInterval.toMillis())); log.trace("Starting SubDomain[{}:{}]", params.context().getId(), member.getId()); - Executors.newScheduledThreadPool(1, Thread.ofVirtual().factory()) - .schedule(() -> oneRound(), initialDelay.toMillis(), TimeUnit.MILLISECONDS); + scheduler.schedule(() -> Thread.ofVirtual().start(Utils.wrapped(() -> oneRound(), log)), + initialDelay.toMillis(), TimeUnit.MILLISECONDS); } @Override @@ -168,8 +173,8 @@ private void handle(Optional result, timer.stop(); } if (started.get()) { - Executors.newScheduledThreadPool(1, Thread.ofVirtual().factory()) - .schedule(() -> oneRound(), gossipInterval.toMillis(), TimeUnit.MILLISECONDS); + scheduler.schedule(() -> Thread.ofVirtual().start(Utils.wrapped(() -> oneRound(), log)), + gossipInterval.toMillis(), TimeUnit.MILLISECONDS); } } } @@ -181,13 +186,16 @@ private Biff have() { } private void oneRound() { - Timer.Context timer = null; - try { - ring.execute((link, ring) -> gossipRound(link, ring), - (result, destination) -> handle(result, destination, timer)); - } catch (Throwable e) { - log.error("Error in delegation gossip in SubDomain[{}:{}]", params.context().getId(), member.getId(), e); - } + Thread.ofVirtual().start(() -> { + Timer.Context timer = null; + try { + ring.execute((link, ring) -> gossipRound(link, ring), + (result, destination) -> handle(result, destination, timer)); + } catch (Throwable e) { + log.error("Error in delegation gossip in SubDomain[{}:{}]", params.context().getId(), member.getId(), + e); + } + }); } private DelegationUpdate.Builder update(DelegationUpdate update, DelegationUpdate.Builder builder) { diff --git a/model/src/main/java/com/salesforce/apollo/model/comms/Delegation.java b/model/src/main/java/com/salesforce/apollo/model/comms/Delegation.java index 386254b0b9..44b103d430 100644 --- a/model/src/main/java/com/salesforce/apollo/model/comms/Delegation.java +++ b/model/src/main/java/com/salesforce/apollo/model/comms/Delegation.java @@ -7,17 +7,16 @@ package com.salesforce.apollo.model.comms; import com.google.common.util.concurrent.ListenableFuture; -import com.salesfoce.apollo.demesne.proto.DelegationUpdate; -import com.salesfoce.apollo.cryptography.proto.Biff; +import com.salesforce.apollo.demesne.proto.DelegationUpdate; +import com.salesforce.apollo.cryptography.proto.Biff; import com.salesforce.apollo.archipelago.Link; /** * @author hal.hildebrand - * */ public interface Delegation extends Link { - DelegationUpdate gossip(Biff identifers); + DelegationUpdate gossip(Biff identifers); void update(DelegationUpdate update); diff --git a/model/src/main/java/com/salesforce/apollo/model/comms/DelegationClient.java b/model/src/main/java/com/salesforce/apollo/model/comms/DelegationClient.java index d079966fb5..33c34c0aac 100644 --- a/model/src/main/java/com/salesforce/apollo/model/comms/DelegationClient.java +++ b/model/src/main/java/com/salesforce/apollo/model/comms/DelegationClient.java @@ -7,9 +7,9 @@ package com.salesforce.apollo.model.comms; 
import com.codahale.metrics.Timer.Context; -import com.salesfoce.apollo.demesne.proto.DelegationGrpc; -import com.salesfoce.apollo.demesne.proto.DelegationUpdate; -import com.salesfoce.apollo.cryptography.proto.Biff; +import com.salesforce.apollo.demesne.proto.DelegationGrpc; +import com.salesforce.apollo.demesne.proto.DelegationUpdate; +import com.salesforce.apollo.cryptography.proto.Biff; import com.salesforce.apollo.archipelago.ManagedServerChannel; import com.salesforce.apollo.membership.Member; @@ -19,9 +19,9 @@ * @author hal.hildebrand */ public class DelegationClient implements Delegation { - private final ManagedServerChannel channel; + private final ManagedServerChannel channel; private final DelegationGrpc.DelegationBlockingStub client; - private final OuterServerMetrics metrics; + private final OuterServerMetrics metrics; public DelegationClient(ManagedServerChannel channel, OuterServerMetrics metrics) { this.metrics = metrics; diff --git a/model/src/main/java/com/salesforce/apollo/model/comms/DelegationServer.java b/model/src/main/java/com/salesforce/apollo/model/comms/DelegationServer.java index 1657d1d2ca..b2a2f5d47e 100644 --- a/model/src/main/java/com/salesforce/apollo/model/comms/DelegationServer.java +++ b/model/src/main/java/com/salesforce/apollo/model/comms/DelegationServer.java @@ -8,9 +8,9 @@ import com.codahale.metrics.Timer.Context; import com.google.protobuf.Empty; -import com.salesfoce.apollo.demesne.proto.DelegationGrpc.DelegationImplBase; -import com.salesfoce.apollo.demesne.proto.DelegationUpdate; -import com.salesfoce.apollo.cryptography.proto.Biff; +import com.salesforce.apollo.demesne.proto.DelegationGrpc.DelegationImplBase; +import com.salesforce.apollo.demesne.proto.DelegationUpdate; +import com.salesforce.apollo.cryptography.proto.Biff; import com.salesforce.apollo.archipelago.Enclave.RoutingClientIdentity; import com.salesforce.apollo.archipelago.RoutableService; @@ -18,7 +18,6 @@ /** * @author hal.hildebrand - * */ public class DelegationServer extends DelegationImplBase { private final RoutingClientIdentity identity; diff --git a/model/src/main/java/com/salesforce/apollo/model/comms/DelegationService.java b/model/src/main/java/com/salesforce/apollo/model/comms/DelegationService.java index 6f0dd90948..d8da20629f 100644 --- a/model/src/main/java/com/salesforce/apollo/model/comms/DelegationService.java +++ b/model/src/main/java/com/salesforce/apollo/model/comms/DelegationService.java @@ -6,13 +6,12 @@ */ package com.salesforce.apollo.model.comms; -import com.salesfoce.apollo.demesne.proto.DelegationUpdate; -import com.salesfoce.apollo.cryptography.proto.Biff; +import com.salesforce.apollo.demesne.proto.DelegationUpdate; +import com.salesforce.apollo.cryptography.proto.Biff; import com.salesforce.apollo.cryptography.Digest; /** * @author hal.hildebrand - * */ public interface DelegationService { DelegationUpdate gossip(Biff identifiers, Digest from); diff --git a/model/src/main/java/com/salesforce/apollo/model/delphinius/ShardedOracle.java b/model/src/main/java/com/salesforce/apollo/model/delphinius/ShardedOracle.java index 49ec6604a9..b1a6af8d43 100644 --- a/model/src/main/java/com/salesforce/apollo/model/delphinius/ShardedOracle.java +++ b/model/src/main/java/com/salesforce/apollo/model/delphinius/ShardedOracle.java @@ -17,7 +17,7 @@ import static com.salesforce.apollo.choam.Session.retryNesting; /** - * Oracle where write ops are JDBC stored procedure calls + * Oracle where write ops are JDBC stored procedure calls operating on the shared sql state * * 
@author hal.hildebrand */ @@ -99,6 +99,26 @@ public CompletableFuture add(Subject subject) { } } + public CompletableFuture add(Assertion assertion, int retries) { + return retryNesting(() -> add(assertion), retries); + } + + public CompletableFuture add(Namespace namespace, int retries) { + return retryNesting(() -> add(namespace), retries); + } + + public CompletableFuture add(Object object, int retries) { + return retryNesting(() -> add(object), retries); + } + + public CompletableFuture add(Relation relation, int retries) { + return retryNesting(() -> add(relation), retries); + } + + public CompletableFuture add(Subject subject, int retries) { + return retryNesting(() -> add(subject), retries); + } + @Override public CompletableFuture delete(Assertion assertion) { var call = mutator.call("call delphinius.deleteAssertion(?, ?, ?, ?, ?, ?, ?, ?) ", @@ -166,6 +186,26 @@ public CompletableFuture delete(Subject subject) { } } + public CompletableFuture delete(Assertion assertion, int retries) { + return retryNesting(() -> delete(assertion), retries); + } + + public CompletableFuture delete(Namespace namespace, int retries) { + return retryNesting(() -> delete(namespace), retries); + } + + public CompletableFuture delete(Object object, int retries) { + return retryNesting(() -> delete(object), retries); + } + + public CompletableFuture delete(Relation relation, int retries) { + return retryNesting(() -> delete(relation), retries); + } + + public CompletableFuture delete(Subject subject, int retries) { + return retryNesting(() -> delete(subject), retries); + } + @Override public CompletableFuture map(Object parent, Object child) { var call = mutator.call("call delphinius.mapObject(?, ?, ?, ?, ?, ?, ?, ?) ", parent.namespace().name(), @@ -209,6 +249,18 @@ public CompletableFuture map(Subject parent, Subject child) { } } + public CompletableFuture map(Object parent, Object child, int retries) { + return retryNesting(() -> map(parent, child), retries); + } + + public CompletableFuture map(Relation parent, Relation child, int retries) { + return retryNesting(() -> map(parent, child), retries); + } + + public CompletableFuture map(Subject parent, Subject child, int retries) { + return retryNesting(() -> map(parent, child), retries); + } + @Override public CompletableFuture remove(Object parent, Object child) { var call = mutator.call("call delphinius.removeObject(?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8) ", @@ -252,58 +304,6 @@ public CompletableFuture remove(Subject parent, Subject child) { } } - public CompletableFuture add(Assertion assertion, int retries) { - return retryNesting(() -> add(assertion), retries); - } - - public CompletableFuture add(Namespace namespace, int retries) { - return retryNesting(() -> add(namespace), retries); - } - - public CompletableFuture add(Object object, int retries) { - return retryNesting(() -> add(object), retries); - } - - public CompletableFuture add(Relation relation, int retries) { - return retryNesting(() -> add(relation), retries); - } - - public CompletableFuture add(Subject subject, int retries) { - return retryNesting(() -> add(subject), retries); - } - - public CompletableFuture delete(Assertion assertion, int retries) { - return retryNesting(() -> delete(assertion), retries); - } - - public CompletableFuture delete(Namespace namespace, int retries) { - return retryNesting(() -> delete(namespace), retries); - } - - public CompletableFuture delete(Object object, int retries) { - return retryNesting(() -> delete(object), retries); - } - - public 
CompletableFuture delete(Relation relation, int retries) { - return retryNesting(() -> delete(relation), retries); - } - - public CompletableFuture delete(Subject subject, int retries) { - return retryNesting(() -> delete(subject), retries); - } - - public CompletableFuture map(Object parent, Object child, int retries) { - return retryNesting(() -> map(parent, child), retries); - } - - public CompletableFuture map(Relation parent, Relation child, int retries) { - return retryNesting(() -> map(parent, child), retries); - } - - public CompletableFuture map(Subject parent, Subject child, int retries) { - return retryNesting(() -> map(parent, child), retries); - } - public CompletableFuture remove(Object parent, Object child, int retries) { return retryNesting(() -> remove(parent, child), retries); } diff --git a/model/src/main/java/com/salesforce/apollo/model/demesnes/Demesne.java b/model/src/main/java/com/salesforce/apollo/model/demesnes/Demesne.java index f2c3d67b1f..f9fa92c19c 100644 --- a/model/src/main/java/com/salesforce/apollo/model/demesnes/Demesne.java +++ b/model/src/main/java/com/salesforce/apollo/model/demesnes/Demesne.java @@ -8,8 +8,8 @@ import java.util.List; -import com.salesfoce.apollo.stereotomy.event.proto.EventCoords; -import com.salesfoce.apollo.stereotomy.event.proto.Ident; +import com.salesforce.apollo.stereotomy.event.proto.EventCoords; +import com.salesforce.apollo.stereotomy.event.proto.Ident; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.stereotomy.EventCoordinates; import com.salesforce.apollo.stereotomy.event.DelegatedInceptionEvent; @@ -22,7 +22,6 @@ * Domain Isolate interface * * @author hal.hildebrand - * */ public interface Demesne { diff --git a/model/src/main/java/com/salesforce/apollo/model/demesnes/DemesneImpl.java b/model/src/main/java/com/salesforce/apollo/model/demesnes/DemesneImpl.java index 1a19731766..02c5cc1ed1 100644 --- a/model/src/main/java/com/salesforce/apollo/model/demesnes/DemesneImpl.java +++ b/model/src/main/java/com/salesforce/apollo/model/demesnes/DemesneImpl.java @@ -6,18 +6,13 @@ */ package com.salesforce.apollo.model.demesnes; -import com.salesfoce.apollo.demesne.proto.DemesneParameters; -import com.salesfoce.apollo.demesne.proto.SubContext; -import com.salesfoce.apollo.stereotomy.event.proto.EventCoords; -import com.salesfoce.apollo.stereotomy.event.proto.Ident; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.KERLServiceGrpc; import com.salesforce.apollo.archipelago.Enclave; import com.salesforce.apollo.choam.Parameters; import com.salesforce.apollo.choam.Parameters.RuntimeParameters; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; -import com.salesforce.apollo.cryptography.JohnHancock; -import com.salesforce.apollo.cryptography.SigningThreshold; +import com.salesforce.apollo.demesne.proto.DemesneParameters; +import com.salesforce.apollo.demesne.proto.SubContext; import com.salesforce.apollo.membership.Context; import com.salesforce.apollo.membership.Member; import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; @@ -28,7 +23,8 @@ import com.salesforce.apollo.stereotomy.caching.CachingKERL; import com.salesforce.apollo.stereotomy.event.DelegatedInceptionEvent; import com.salesforce.apollo.stereotomy.event.DelegatedRotationEvent; -import com.salesforce.apollo.stereotomy.event.EstablishmentEvent; +import com.salesforce.apollo.stereotomy.event.proto.EventCoords; +import 
com.salesforce.apollo.stereotomy.event.proto.Ident; import com.salesforce.apollo.stereotomy.identifier.Identifier; import com.salesforce.apollo.stereotomy.identifier.SelfAddressingIdentifier; import com.salesforce.apollo.stereotomy.identifier.spec.IdentifierSpecification.Builder; @@ -36,7 +32,7 @@ import com.salesforce.apollo.stereotomy.jks.JksKeyStore; import com.salesforce.apollo.stereotomy.services.grpc.kerl.CommonKERLClient; import com.salesforce.apollo.stereotomy.services.grpc.kerl.KERLAdapter; -import com.salesforce.apollo.thoth.Ani; +import com.salesforce.apollo.stereotomy.services.grpc.proto.KERLServiceGrpc; import com.salesforce.apollo.thoth.Thoth; import com.salesforce.apollo.utils.Hex; import io.grpc.ManagedChannel; @@ -49,7 +45,6 @@ import java.io.File; import java.io.IOException; -import java.io.InputStream; import java.nio.file.Path; import java.security.GeneralSecurityException; import java.security.KeyStore; @@ -75,16 +70,16 @@ public class DemesneImpl implements Demesne { private static final Duration DEFAULT_GOSSIP_INTERVAL = Duration.ofMillis(5); private static final EventLoopGroup eventLoopGroup = IMPL.getEventLoopGroup(); private static final Logger log = LoggerFactory.getLogger(DemesneImpl.class); - private final static Executor executor = Executors.newVirtualThreadPerTaskExecutor(); - private final KERL kerl; - private final OuterContextClient outer; - private final DemesneParameters parameters; - private final AtomicBoolean started = new AtomicBoolean(); - private final Thoth thoth; - private final EventValidation validation; - private final Context context; - private volatile SubDomain domain; - private volatile Enclave enclave; + + private final Executor executor = Executors.newVirtualThreadPerTaskExecutor(); + private final KERL.AppendKERL kerl; + private final OuterContextClient outer; + private final DemesneParameters parameters; + private final AtomicBoolean started = new AtomicBoolean(); + private final Thoth thoth; + private final Context context; + private volatile SubDomain domain; + private volatile Enclave enclave; public DemesneImpl(DemesneParameters parameters) throws GeneralSecurityException, IOException { assert parameters.hasContext() : "Must define context id"; @@ -104,8 +99,6 @@ public DemesneImpl(DemesneParameters parameters) throws GeneralSecurityException keystore.load(null, password); kerl = kerlFrom(outerContextAddress); - validation = new Ani(context.getId(), kerl).eventValidation( - Duration.ofSeconds(parameters.getTimeout().getSeconds(), parameters.getTimeout().getNanos())); Stereotomy stereotomy = new StereotomyImpl(new JksKeyStore(keystore, passwordProvider), kerl, entropy); thoth = new Thoth(stereotomy); @@ -131,7 +124,7 @@ public void commit(EventCoords coordinates) { enclave = new Enclave(thoth.member(), new DomainSocketAddress(outerContextAddress), new DomainSocketAddress(commDirectory.resolve(parameters.getPortal()).toFile()), this::registerContext); - domain = subdomainFrom(parameters, commDirectory, outerContextAddress, thoth.member(), context); + domain = subdomainFrom(parameters, thoth.member(), context); } @Override @@ -177,9 +170,10 @@ public void stop() { @Override public void viewChange(Digest viewId, List joining, List leaving) { final var current = domain; - joining.forEach(coords -> current.activate( - new IdentifierMember(coords.getIdentifier().getDigest(kerl.getDigestAlgorithm()), - new KerlVerifier<>(coords.getIdentifier(), kerl)))); + joining.forEach(coords -> current.getContext() + .activate(new IdentifierMember( + 
coords.getIdentifier().getDigest(kerl.getDigestAlgorithm()), + new KerlVerifier<>(coords.getIdentifier(), kerl)))); leaving.forEach(id -> current.getContext().remove(id)); } @@ -240,52 +234,12 @@ private RuntimeParameters.Builder runtimeParameters(DemesneParameters parameters .setFoundation(parameters.getFoundation()); } - private SubDomain subdomainFrom(DemesneParameters parameters, final Path commDirectory, final File address, - ControlledIdentifierMember member, Context context) { + private SubDomain subdomainFrom(DemesneParameters parameters, ControlledIdentifierMember member, + Context context) { final var gossipInterval = parameters.getGossipInterval(); final var interval = gossipInterval.getSeconds() != 0 || gossipInterval.getNanos() != 0 ? Duration.ofSeconds( gossipInterval.getSeconds(), gossipInterval.getNanos()) : DEFAULT_GOSSIP_INTERVAL; return new SubDomain(member, Parameters.newBuilder(), runtimeParameters(parameters, member, context), parameters.getMaxTransfer(), interval, parameters.getFalsePositiveRate()); } - - public class DemesneMember implements Member { - private final Digest id; - protected EstablishmentEvent event; - - public DemesneMember(EstablishmentEvent event) { - this.event = event; - if (event.getIdentifier() instanceof SelfAddressingIdentifier sai) { - id = sai.getDigest(); - } else { - throw new IllegalArgumentException( - "Only self addressing identifiers supported: " + event.getIdentifier()); - } - } - - @Override - public int compareTo(Member m) { - return id.compareTo(m.getId()); - } - - @Override - public Filtered filtered(SigningThreshold threshold, JohnHancock signature, InputStream message) { - return validation.filtered(event.getCoordinates(), threshold, signature, message); - } - - @Override - public Digest getId() { - return id; - } - - @Override - public boolean verify(JohnHancock signature, InputStream message) { - return validation.verify(event.getCoordinates(), signature, message); - } - - @Override - public boolean verify(SigningThreshold threshold, JohnHancock signature, InputStream message) { - return validation.verify(event.getCoordinates(), threshold, signature, message); - } - } } diff --git a/model/src/main/java/com/salesforce/apollo/model/demesnes/JniBridge.java b/model/src/main/java/com/salesforce/apollo/model/demesnes/JniBridge.java index 20eb9b6130..db82fd6c0a 100644 --- a/model/src/main/java/com/salesforce/apollo/model/demesnes/JniBridge.java +++ b/model/src/main/java/com/salesforce/apollo/model/demesnes/JniBridge.java @@ -17,10 +17,10 @@ import org.slf4j.LoggerFactory; import com.google.protobuf.InvalidProtocolBufferException; -import com.salesfoce.apollo.demesne.proto.DemesneParameters; -import com.salesfoce.apollo.demesne.proto.ViewChange; -import com.salesfoce.apollo.stereotomy.event.proto.EventCoords; -import com.salesfoce.apollo.stereotomy.event.proto.Ident; +import com.salesforce.apollo.demesne.proto.DemesneParameters; +import com.salesforce.apollo.demesne.proto.ViewChange; +import com.salesforce.apollo.stereotomy.event.proto.EventCoords; +import com.salesforce.apollo.stereotomy.event.proto.Ident; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.stereotomy.EventCoordinates; import com.salesforce.apollo.stereotomy.event.DelegatedInceptionEvent; @@ -36,7 +36,6 @@ * Interface to SubDomain Demesne running in the GraalVM Isolate as JNI library * * @author hal.hildebrand - * */ public class JniBridge implements Demesne { private static final String DEMESNE_SHARED_LIB_NAME = "demesne"; @@ -50,6 +49,14 
@@ public class JniBridge implements Demesne { } } + private final long isolateId; + + public JniBridge(DemesneParameters parameters) { + isolateId = createIsolate(); + final var serialized = parameters.toByteString().toByteArray(); + launch(isolateId, serialized, serialized.length); + } + private static native boolean active(long isolateId); private static native void commit(long isolateId, byte[] eventCoordinates, int eventCoordinatesLen); @@ -77,14 +84,6 @@ private static byte[] toBytes(char[] chars) { private static native boolean viewChange(long isolateId, byte[] parameters, int paramLength); - private final long isolateId; - - public JniBridge(DemesneParameters parameters) { - isolateId = createIsolate(); - final var serialized = parameters.toByteString().toByteArray(); - launch(isolateId, serialized, serialized.length); - } - @Override public boolean active() { return active(isolateId); diff --git a/model/src/main/java/com/salesforce/apollo/model/demesnes/comm/DemesneKERLServer.java b/model/src/main/java/com/salesforce/apollo/model/demesnes/comm/DemesneKERLServer.java index 9662b65719..0f64b7890b 100644 --- a/model/src/main/java/com/salesforce/apollo/model/demesnes/comm/DemesneKERLServer.java +++ b/model/src/main/java/com/salesforce/apollo/model/demesnes/comm/DemesneKERLServer.java @@ -8,9 +8,9 @@ import com.codahale.metrics.Timer.Context; import com.google.protobuf.Empty; -import com.salesfoce.apollo.stereotomy.event.proto.*; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.*; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.KERLServiceGrpc.KERLServiceImplBase; +import com.salesforce.apollo.stereotomy.event.proto.*; +import com.salesforce.apollo.stereotomy.services.grpc.proto.*; +import com.salesforce.apollo.stereotomy.services.grpc.proto.KERLServiceGrpc.KERLServiceImplBase; import com.salesforce.apollo.stereotomy.services.grpc.StereotomyMetrics; import com.salesforce.apollo.stereotomy.services.proto.ProtoKERLService; import io.grpc.stub.StreamObserver; @@ -22,7 +22,7 @@ */ public class DemesneKERLServer extends KERLServiceImplBase { private final StereotomyMetrics metrics; - private final ProtoKERLService service; + private final ProtoKERLService service; public DemesneKERLServer(ProtoKERLService service, StereotomyMetrics metrics) { this.metrics = metrics; @@ -44,7 +44,8 @@ public void append(KeyEventsContext request, StreamObserver responseO if (timer != null) { timer.stop(); } - var states = result == null ? KeyStates.getDefaultInstance() : KeyStates.newBuilder().addAllKeyStates(result).build(); + var states = + result == null ? KeyStates.getDefaultInstance() : KeyStates.newBuilder().addAllKeyStates(result).build(); responseObserver.onNext(states); responseObserver.onCompleted(); if (metrics != null) { @@ -90,7 +91,8 @@ public void appendKERL(KERLContext request, StreamObserver responseOb if (timer != null) { timer.stop(); } - var results = result == null ? KeyStates.getDefaultInstance() : KeyStates.newBuilder().addAllKeyStates(result).build(); + var results = + result == null ? 
KeyStates.getDefaultInstance() : KeyStates.newBuilder().addAllKeyStates(result).build(); responseObserver.onNext(results); responseObserver.onCompleted(); if (metrics != null) { @@ -122,7 +124,8 @@ public void appendValidations(Validations request, StreamObserver respons } @Override - public void appendWithAttachments(KeyEventWithAttachmentsContext request, StreamObserver responseObserver) { + public void appendWithAttachments(KeyEventWithAttachmentsContext request, + StreamObserver responseObserver) { Context timer = metrics != null ? metrics.appendWithAttachmentsService().time() : null; if (metrics != null) { metrics.inboundBandwidth().mark(request.getSerializedSize()); @@ -133,7 +136,8 @@ public void appendWithAttachments(KeyEventWithAttachmentsContext request, Stream responseObserver.onNext(KeyStates.getDefaultInstance()); responseObserver.onCompleted(); } else { - var states = result == null ? KeyStates.getDefaultInstance() : KeyStates.newBuilder().addAllKeyStates(result).build(); + var states = + result == null ? KeyStates.getDefaultInstance() : KeyStates.newBuilder().addAllKeyStates(result).build(); responseObserver.onNext(states); responseObserver.onCompleted(); if (metrics != null) { @@ -292,7 +296,8 @@ public void getKeyStateCoords(EventCoords request, StreamObserver res } @Override - public void getKeyStateWithAttachments(EventCoords request, StreamObserver responseObserver) { + public void getKeyStateWithAttachments(EventCoords request, + StreamObserver responseObserver) { Context timer = metrics != null ? metrics.getKeyStateService().time() : null; if (metrics != null) { final var serializedSize = request.getSerializedSize(); diff --git a/model/src/main/java/com/salesforce/apollo/model/demesnes/comm/OuterContextClient.java b/model/src/main/java/com/salesforce/apollo/model/demesnes/comm/OuterContextClient.java index 0ae4deabb0..9a92f6d9fd 100644 --- a/model/src/main/java/com/salesforce/apollo/model/demesnes/comm/OuterContextClient.java +++ b/model/src/main/java/com/salesforce/apollo/model/demesnes/comm/OuterContextClient.java @@ -7,16 +7,15 @@ package com.salesforce.apollo.model.demesnes.comm; import com.codahale.metrics.Timer.Context; -import com.salesfoce.apollo.demesne.proto.OuterContextGrpc; -import com.salesfoce.apollo.demesne.proto.OuterContextGrpc.OuterContextBlockingStub; -import com.salesfoce.apollo.demesne.proto.SubContext; -import com.salesfoce.apollo.cryptography.proto.Digeste; +import com.salesforce.apollo.demesne.proto.OuterContextGrpc; +import com.salesforce.apollo.demesne.proto.OuterContextGrpc.OuterContextBlockingStub; +import com.salesforce.apollo.demesne.proto.SubContext; +import com.salesforce.apollo.cryptography.proto.Digeste; import io.grpc.ManagedChannel; /** * @author hal.hildebrand - * */ public class OuterContextClient implements OuterContextService { private final OuterContextBlockingStub client; diff --git a/model/src/main/java/com/salesforce/apollo/model/demesnes/comm/OuterContextServer.java b/model/src/main/java/com/salesforce/apollo/model/demesnes/comm/OuterContextServer.java index 87d1637631..f4e3626fe0 100644 --- a/model/src/main/java/com/salesforce/apollo/model/demesnes/comm/OuterContextServer.java +++ b/model/src/main/java/com/salesforce/apollo/model/demesnes/comm/OuterContextServer.java @@ -8,20 +8,19 @@ import com.codahale.metrics.Timer.Context; import com.google.protobuf.Empty; -import com.salesfoce.apollo.demesne.proto.OuterContextGrpc.OuterContextImplBase; -import com.salesfoce.apollo.demesne.proto.SubContext; -import 
com.salesfoce.apollo.cryptography.proto.Digeste; +import com.salesforce.apollo.demesne.proto.OuterContextGrpc.OuterContextImplBase; +import com.salesforce.apollo.demesne.proto.SubContext; +import com.salesforce.apollo.cryptography.proto.Digeste; import com.salesforce.apollo.model.comms.OuterServerMetrics; import io.grpc.stub.StreamObserver; /** * @author hal.hildebrand - * */ public class OuterContextServer extends OuterContextImplBase { - private final OuterServerMetrics metrics; + private final OuterServerMetrics metrics; private final OuterContextService service; public OuterContextServer(OuterContextService service, OuterServerMetrics metrics) { diff --git a/model/src/main/java/com/salesforce/apollo/model/demesnes/comm/OuterContextService.java b/model/src/main/java/com/salesforce/apollo/model/demesnes/comm/OuterContextService.java index a3b2e232ba..e025046757 100644 --- a/model/src/main/java/com/salesforce/apollo/model/demesnes/comm/OuterContextService.java +++ b/model/src/main/java/com/salesforce/apollo/model/demesnes/comm/OuterContextService.java @@ -6,12 +6,11 @@ */ package com.salesforce.apollo.model.demesnes.comm; -import com.salesfoce.apollo.demesne.proto.SubContext; -import com.salesfoce.apollo.cryptography.proto.Digeste; +import com.salesforce.apollo.demesne.proto.SubContext; +import com.salesforce.apollo.cryptography.proto.Digeste; /** * @author hal.hildebrand - * */ public interface OuterContextService { void deregister(Digeste context); diff --git a/model/src/main/java/com/salesforce/apollo/model/stereotomy/ShardedKERL.java b/model/src/main/java/com/salesforce/apollo/model/stereotomy/ShardedKERL.java index d2f97387d0..ece2014e95 100644 --- a/model/src/main/java/com/salesforce/apollo/model/stereotomy/ShardedKERL.java +++ b/model/src/main/java/com/salesforce/apollo/model/stereotomy/ShardedKERL.java @@ -7,7 +7,7 @@ package com.salesforce.apollo.model.stereotomy; import com.google.protobuf.InvalidProtocolBufferException; -import com.salesfoce.apollo.stereotomy.event.proto.KeyState_; +import com.salesforce.apollo.stereotomy.event.proto.KeyState_; import com.salesforce.apollo.choam.support.InvalidTransaction; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.cryptography.JohnHancock; diff --git a/model/src/main/resources/initialize.xml b/model/src/main/resources/initialize.xml index 4f200584c1..d105b81b5a 100644 --- a/model/src/main/resources/initialize.xml +++ b/model/src/main/resources/initialize.xml @@ -1,14 +1,9 @@ - + - - - - - - create alias apollo_kernel.add_members for - "com.salesforce.apollo.model.Domain.addMembers" - - - \ No newline at end of file + + + + diff --git a/model/src/test/java/com/salesforce/apollo/model/ContainmentDomainTest.java b/model/src/test/java/com/salesforce/apollo/model/ContainmentDomainTest.java new file mode 100644 index 0000000000..a33746955c --- /dev/null +++ b/model/src/test/java/com/salesforce/apollo/model/ContainmentDomainTest.java @@ -0,0 +1,135 @@ +/* + * Copyright (c) 2021, salesforce.com, inc. + * All rights reserved. 
+ * SPDX-License-Identifier: BSD-3-Clause + * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause + */ +package com.salesforce.apollo.model; + +import com.salesforce.apollo.archipelago.LocalServer; +import com.salesforce.apollo.archipelago.Router; +import com.salesforce.apollo.archipelago.ServerConnectionCache; +import com.salesforce.apollo.choam.Parameters; +import com.salesforce.apollo.choam.Parameters.Builder; +import com.salesforce.apollo.choam.Parameters.ProducerParameters; +import com.salesforce.apollo.choam.Parameters.RuntimeParameters; +import com.salesforce.apollo.choam.proto.FoundationSeal; +import com.salesforce.apollo.cryptography.Digest; +import com.salesforce.apollo.cryptography.DigestAlgorithm; +import com.salesforce.apollo.delphinius.Oracle; +import com.salesforce.apollo.membership.ContextImpl; +import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; +import com.salesforce.apollo.stereotomy.StereotomyImpl; +import com.salesforce.apollo.stereotomy.identifier.spec.IdentifierSpecification; +import com.salesforce.apollo.stereotomy.mem.MemKERL; +import com.salesforce.apollo.stereotomy.mem.MemKeyStore; +import com.salesforce.apollo.utils.Entropy; +import com.salesforce.apollo.utils.Utils; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.net.InetSocketAddress; +import java.nio.file.Path; +import java.security.SecureRandom; +import java.time.Duration; +import java.util.ArrayList; +import java.util.UUID; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + * @author hal.hildebrand + */ +public class ContainmentDomainTest { + private static final int CARDINALITY = 5; + private static final Digest GENESIS_VIEW_ID = DigestAlgorithm.DEFAULT.digest( + "Give me food or give me slack or kill me".getBytes()); + private final ArrayList domains = new ArrayList<>(); + private final ArrayList routers = new ArrayList<>(); + + @AfterEach + public void after() { + domains.forEach(Domain::stop); + domains.clear(); + routers.forEach(r -> r.close(Duration.ofSeconds(1))); + routers.clear(); + } + + @BeforeEach + public void before() throws Exception { + + final var commsDirectory = Path.of("target/comms"); + commsDirectory.toFile().mkdirs(); + + var ffParams = com.salesforce.apollo.fireflies.Parameters.newBuilder(); + var entropy = SecureRandom.getInstance("SHA1PRNG"); + entropy.setSeed(new byte[] { 6, 6, 6 }); + final var prefix = UUID.randomUUID().toString(); + Path checkpointDirBase = Path.of("target", "ct-chkpoints-" + Entropy.nextBitsStreamLong()); + Utils.clean(checkpointDirBase.toFile()); + var context = new ContextImpl<>(DigestAlgorithm.DEFAULT.getOrigin(), CARDINALITY, 0.2, 3); + var params = params(); + var stereotomy = new StereotomyImpl(new MemKeyStore(), new MemKERL(params.getDigestAlgorithm()), entropy); + + var identities = IntStream.range(0, CARDINALITY) + .mapToObj(i -> stereotomy.newIdentifier()) + .collect(Collectors.toMap(controlled -> controlled.getIdentifier().getDigest(), + controlled -> controlled)); + + var sealed = FoundationSeal.newBuilder().build(); + final var group = DigestAlgorithm.DEFAULT.getOrigin(); + identities.forEach((d, id) -> { + final var member = new ControlledIdentifierMember(id); + var localRouter = new LocalServer(prefix, member).router(ServerConnectionCache.newBuilder().setTarget(30)); + 
routers.add(localRouter); + var dbUrl = String.format("jdbc:h2:mem:%s-%s;DB_CLOSE_DELAY=-1", member.getId(), UUID.randomUUID()); + var pdParams = new ProcessDomain.ProcessDomainParameters(dbUrl, Duration.ofMinutes(1), checkpointDirBase, + Duration.ofMillis(10), 0.00125, + Duration.ofMinutes(1), 3, 10, 0.1); + var domain = new ProcessContainerDomain(group, member, pdParams, params, RuntimeParameters.newBuilder() + .setFoundation( + sealed) + .setContext( + context) + .setCommunications( + localRouter), + new InetSocketAddress(0), commsDirectory, ffParams, + IdentifierSpecification.newBuilder(), null); + domains.add(domain); + localRouter.start(); + }); + + domains.forEach(domain -> context.activate(domain.getMember())); + } + + @Test + public void smoke() throws Exception { + domains.forEach(e -> Thread.ofVirtual().start(e::start)); + final var activated = Utils.waitForCondition(60_000, 1_000, () -> domains.stream().allMatch(Domain::active)); + assertTrue(activated, "Domains did not fully activate: " + (domains.stream() + .filter(c -> !c.active()) + .map(Domain::logState) + .toList())); + var oracle = domains.get(0).getDelphi(); + oracle.add(new Oracle.Namespace("test")).get(); + DomainTest.smoke(oracle); + } + + private Builder params() { + var params = Parameters.newBuilder() + .setGenesisViewId(GENESIS_VIEW_ID) + .setGossipDuration(Duration.ofMillis(10)) + .setProducer(ProducerParameters.newBuilder() + .setGossipDuration(Duration.ofMillis(20)) + .setBatchInterval(Duration.ofMillis(100)) + .setMaxBatchByteSize(1024 * 1024) + .setMaxBatchCount(3000) + .build()) + .setCheckpointBlockDelta(200); + params.getProducer().ethereal().setNumberOfEpochs(4); + return params; + } +} diff --git a/model/src/test/java/com/salesforce/apollo/model/DomainTest.java b/model/src/test/java/com/salesforce/apollo/model/DomainTest.java index 9ca436dd23..25a714c972 100644 --- a/model/src/test/java/com/salesforce/apollo/model/DomainTest.java +++ b/model/src/test/java/com/salesforce/apollo/model/DomainTest.java @@ -6,8 +6,6 @@ */ package com.salesforce.apollo.model; -import com.salesfoce.apollo.choam.proto.Foundation; -import com.salesfoce.apollo.choam.proto.FoundationSeal; import com.salesforce.apollo.archipelago.LocalServer; import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.archipelago.ServerConnectionCache; @@ -15,15 +13,14 @@ import com.salesforce.apollo.choam.Parameters.Builder; import com.salesforce.apollo.choam.Parameters.ProducerParameters; import com.salesforce.apollo.choam.Parameters.RuntimeParameters; +import com.salesforce.apollo.choam.proto.FoundationSeal; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.delphinius.Oracle; import com.salesforce.apollo.delphinius.Oracle.Assertion; import com.salesforce.apollo.membership.ContextImpl; import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; -import com.salesforce.apollo.stereotomy.EventValidation; import com.salesforce.apollo.stereotomy.StereotomyImpl; -import com.salesforce.apollo.stereotomy.identifier.spec.IdentifierSpecification; import com.salesforce.apollo.stereotomy.mem.MemKERL; import com.salesforce.apollo.stereotomy.mem.MemKeyStore; import com.salesforce.apollo.utils.Entropy; @@ -197,10 +194,6 @@ public void after() { @BeforeEach public void before() throws Exception { - - final var commsDirectory = Path.of("target/comms"); - commsDirectory.toFile().mkdirs(); - var ffParams = 
com.salesforce.apollo.fireflies.Parameters.newBuilder(); var entropy = SecureRandom.getInstance("SHA1PRNG"); entropy.setSeed(new byte[] { 6, 6, 6 }); @@ -216,21 +209,22 @@ public void before() throws Exception { .collect(Collectors.toMap(controlled -> controlled.getIdentifier().getDigest(), controlled -> controlled)); - var foundation = Foundation.newBuilder(); - identities.keySet().forEach(d -> foundation.addMembership(d.toDigeste())); - var sealed = FoundationSeal.newBuilder().setFoundation(foundation).build(); + var sealed = FoundationSeal.newBuilder().build(); final var group = DigestAlgorithm.DEFAULT.getOrigin(); identities.forEach((d, id) -> { final var member = new ControlledIdentifierMember(id); var localRouter = new LocalServer(prefix, member).router(ServerConnectionCache.newBuilder().setTarget(30)); routers.add(localRouter); - var domain = new ProcessDomain(group, member, params, "jdbc:h2:mem:", checkpointDirBase, - RuntimeParameters.newBuilder() - .setFoundation(sealed) - .setContext(context) - .setCommunications(localRouter), new InetSocketAddress(0), - commsDirectory, ffParams, EventValidation.NONE, - IdentifierSpecification.newBuilder()); + var dbUrl = String.format("jdbc:h2:mem:%s-%s;DB_CLOSE_DELAY=-1", member.getId(), UUID.randomUUID()); + var pdParams = new ProcessDomain.ProcessDomainParameters(dbUrl, Duration.ofMinutes(1), checkpointDirBase, + Duration.ofMillis(10), 0.00125, + Duration.ofMinutes(1), 3, 10, 0.1); + var domain = new ProcessDomain(group, member, pdParams, params, RuntimeParameters.newBuilder() + .setFoundation(sealed) + .setContext(context) + .setCommunications( + localRouter), + new InetSocketAddress(0), ffParams, null); domains.add(domain); localRouter.start(); }); @@ -246,7 +240,7 @@ public void smoke() throws Exception { .filter(c -> !c.active()) .map(Domain::logState) .toList())); - var oracle = domains.get(0).getDelphi(); + var oracle = domains.getFirst().getDelphi(); oracle.add(new Oracle.Namespace("test")).get(); smoke(oracle); } diff --git a/model/src/test/java/com/salesforce/apollo/model/FireFliesTest.java b/model/src/test/java/com/salesforce/apollo/model/FireFliesTest.java index cac4d491f6..b7aab9b7cf 100644 --- a/model/src/test/java/com/salesforce/apollo/model/FireFliesTest.java +++ b/model/src/test/java/com/salesforce/apollo/model/FireFliesTest.java @@ -6,8 +6,6 @@ */ package com.salesforce.apollo.model; -import com.salesfoce.apollo.choam.proto.Foundation; -import com.salesfoce.apollo.choam.proto.FoundationSeal; import com.salesforce.apollo.archipelago.LocalServer; import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.archipelago.ServerConnectionCache; @@ -15,6 +13,7 @@ import com.salesforce.apollo.choam.Parameters.Builder; import com.salesforce.apollo.choam.Parameters.ProducerParameters; import com.salesforce.apollo.choam.Parameters.RuntimeParameters; +import com.salesforce.apollo.choam.proto.FoundationSeal; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.delphinius.Oracle; @@ -25,9 +24,7 @@ import com.salesforce.apollo.membership.ContextImpl; import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; import com.salesforce.apollo.stereotomy.EventCoordinates; -import com.salesforce.apollo.stereotomy.EventValidation; import com.salesforce.apollo.stereotomy.StereotomyImpl; -import com.salesforce.apollo.stereotomy.identifier.spec.IdentifierSpecification; import com.salesforce.apollo.stereotomy.mem.MemKERL; import 
com.salesforce.apollo.stereotomy.mem.MemKeyStore; import com.salesforce.apollo.utils.Entropy; @@ -42,7 +39,6 @@ import java.time.Duration; import java.util.*; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; @@ -71,10 +67,6 @@ public void after() { @BeforeEach public void before() throws Exception { - - final var commsDirectory = Path.of("target/comms"); - commsDirectory.toFile().mkdirs(); - var ffParams = com.salesforce.apollo.fireflies.Parameters.newBuilder(); var entropy = SecureRandom.getInstance("SHA1PRNG"); entropy.setSeed(new byte[] { 6, 6, 6 }); @@ -89,20 +81,21 @@ public void before() throws Exception { }).collect(Collectors.toMap(controlled -> controlled.getIdentifier().getDigest(), controlled -> controlled)); Digest group = DigestAlgorithm.DEFAULT.getOrigin(); - var foundation = Foundation.newBuilder(); - identities.keySet().forEach(d -> foundation.addMembership(d.toDigeste())); - var sealed = FoundationSeal.newBuilder().setFoundation(foundation).build(); + var sealed = FoundationSeal.newBuilder().build(); identities.forEach((digest, id) -> { var context = new ContextImpl<>(DigestAlgorithm.DEFAULT.getLast(), CARDINALITY, 0.2, 3); final var member = new ControlledIdentifierMember(id); var localRouter = new LocalServer(prefix, member).router(ServerConnectionCache.newBuilder().setTarget(30)); - var node = new ProcessDomain(group, member, params, "jdbc:h2:mem:", checkpointDirBase, - RuntimeParameters.newBuilder() - .setFoundation(sealed) - .setContext(context) - .setCommunications(localRouter), new InetSocketAddress(0), - commsDirectory, ffParams, EventValidation.NONE, - IdentifierSpecification.newBuilder()); + var dbUrl = String.format("jdbc:h2:mem:%s-%s;DB_CLOSE_DELAY=-1", member.getId(), UUID.randomUUID()); + var pdParams = new ProcessDomain.ProcessDomainParameters(dbUrl, Duration.ofSeconds(5), checkpointDirBase, + Duration.ofMillis(10), 0.00125, + Duration.ofSeconds(5), 3, 10, 0.1); + var node = new ProcessDomain(group, member, pdParams, params, RuntimeParameters.newBuilder() + .setFoundation(sealed) + .setContext(context) + .setCommunications( + localRouter), + new InetSocketAddress(0), ffParams, null); domains.add(node); routers.put(node, localRouter); localRouter.start(); @@ -115,16 +108,10 @@ public void smokin() throws Exception { long then = System.currentTimeMillis(); final var countdown = new CountDownLatch(domains.size()); final var seeds = Collections.singletonList( - new Seed(domains.get(0).getMember().getEvent().getCoordinates(), new InetSocketAddress(0))); + new Seed(domains.getFirst().getMember().getEvent(), new InetSocketAddress(0))); domains.forEach(d -> { var listener = new View.ViewLifecycleListener() { - @Override - public void update(EventCoordinates update) { - // TODO Auto-generated method stub - - } - @Override public void viewChange(Context context, Digest viewId, List joins, List leaves) { @@ -145,19 +132,17 @@ public void viewChange(Context context, Digest viewId, List(new CountDownLatch(1)); - domains.get(0) + domains.getFirst() .getFoundation() - .start(() -> started.get().countDown(), gossipDuration, Collections.emptyList(), - Executors.newScheduledThreadPool(2, Thread.ofVirtual().factory())); + .start(() -> started.get().countDown(), gossipDuration, Collections.emptyList()); assertTrue(started.get().await(10, TimeUnit.SECONDS), "Cannot start up kernel"); started.set(new 
CountDownLatch(CARDINALITY - 1)); - domains.subList(1, domains.size()).forEach(d -> { - d.getFoundation() - .start(() -> started.get().countDown(), gossipDuration, seeds, - Executors.newScheduledThreadPool(1, Thread.ofVirtual().factory())); - }); - assertTrue(started.get().await(10, TimeUnit.SECONDS), "could not start views"); + domains.subList(1, domains.size()) + .forEach(d -> Thread.ofVirtual() + .start(() -> d.getFoundation() + .start(() -> started.get().countDown(), gossipDuration, seeds))); + assertTrue(started.get().await(30, TimeUnit.SECONDS), "could not start views"); assertTrue(countdown.await(30, TimeUnit.SECONDS), "Could not join all members in all views"); diff --git a/model/src/test/java/com/salesforce/apollo/model/StoredProceduresTest.java b/model/src/test/java/com/salesforce/apollo/model/StoredProceduresTest.java deleted file mode 100644 index 8de97184b3..0000000000 --- a/model/src/test/java/com/salesforce/apollo/model/StoredProceduresTest.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright (c) 2021, salesforce.com, inc. - * All rights reserved. - * SPDX-License-Identifier: BSD-3-Clause - * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause - */ -package com.salesforce.apollo.model; - -import com.salesforce.apollo.cryptography.DigestAlgorithm; -import com.salesforce.apollo.state.Emulator; -import com.salesforce.apollo.stereotomy.identifier.SelfAddressingIdentifier; -import org.jooq.SQLDialect; -import org.jooq.impl.DSL; -import org.junit.jupiter.api.Test; - -import java.time.Duration; -import java.util.ArrayList; -import java.util.Random; - -import static com.salesforce.apollo.model.schema.tables.Member.MEMBER; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -/** - * @author hal.hildebrand - */ -public class StoredProceduresTest { - - @Test - public void membership() throws Exception { - var entropy = new Random(0x1638); - Duration timeout = Duration.ofSeconds(100); - Emulator emmy = new Emulator(); - emmy.start(Domain.boostrapMigration()); - - var ids = new ArrayList(); - for (int i = 0; i < 100; i++) { - ids.add(new SelfAddressingIdentifier(DigestAlgorithm.DEFAULT.random(entropy))); - } - - var call = emmy.getMutator() - .call("{call apollo_kernel.add_members(?, ?) 
}", - ids.stream().map(d -> d.getDigest().getBytes()).toList(), "active"); - - var result = emmy.getMutator().execute(call, timeout); - result.get(); - - var connector = emmy.newConnector(); - var context = DSL.using(connector, SQLDialect.H2); - - var members = context.selectFrom(MEMBER).fetch(); - assertEquals(ids.size(), members.size()); - - for (var digest : ids) { - assertTrue(Domain.isMember(context, digest), "Not an active member: " + digest); - } - } -} diff --git a/model/src/test/java/com/salesforce/apollo/model/demesnes/DemesneTest.java b/model/src/test/java/com/salesforce/apollo/model/demesnes/DemesneTest.java index 5483dd02e6..db0f5712d0 100644 --- a/model/src/test/java/com/salesforce/apollo/model/demesnes/DemesneTest.java +++ b/model/src/test/java/com/salesforce/apollo/model/demesnes/DemesneTest.java @@ -8,18 +8,14 @@ import com.google.protobuf.Any; import com.google.protobuf.ByteString; -import com.salesfoce.apollo.cryptography.proto.Digeste; -import com.salesfoce.apollo.demesne.proto.DemesneParameters; -import com.salesfoce.apollo.demesne.proto.SubContext; -import com.salesfoce.apollo.test.proto.ByteMessage; -import com.salesfoce.apollo.test.proto.TestItGrpc; -import com.salesfoce.apollo.test.proto.TestItGrpc.TestItBlockingStub; -import com.salesfoce.apollo.test.proto.TestItGrpc.TestItImplBase; import com.salesforce.apollo.archipelago.*; import com.salesforce.apollo.archipelago.RouterImpl.CommonCommunications; import com.salesforce.apollo.comm.grpc.DomainSocketServerInterceptor; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; +import com.salesforce.apollo.cryptography.proto.Digeste; +import com.salesforce.apollo.demesne.proto.DemesneParameters; +import com.salesforce.apollo.demesne.proto.SubContext; import com.salesforce.apollo.membership.Member; import com.salesforce.apollo.membership.impl.SigningMemberImpl; import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; @@ -38,6 +34,10 @@ import com.salesforce.apollo.stereotomy.mem.MemKERL; import com.salesforce.apollo.stereotomy.mem.MemKeyStore; import com.salesforce.apollo.stereotomy.services.proto.ProtoKERLAdapter; +import com.salesforce.apollo.test.proto.ByteMessage; +import com.salesforce.apollo.test.proto.TestItGrpc; +import com.salesforce.apollo.test.proto.TestItGrpc.TestItBlockingStub; +import com.salesforce.apollo.test.proto.TestItGrpc.TestItImplBase; import com.salesforce.apollo.utils.Utils; import io.grpc.*; import io.grpc.ForwardingClientCall.SimpleForwardingClientCall; @@ -77,7 +77,8 @@ public class DemesneTest { private final static Class clientChannelType = IMPL.getChannelType(); private static final Class serverChannelType = IMPL.getServerDomainSocketChannelClass(); private final static Executor executor = Executors.newVirtualThreadPerTaskExecutor(); - private final TestItService local = new TestItService() { + + private final TestItService local = new TestItService() { @Override public void close() throws IOException { @@ -93,7 +94,7 @@ public Any ping(Any request) { return null; } }; - private EventLoopGroup eventLoopGroup; + private EventLoopGroup eventLoopGroup; public static ClientInterceptor clientInterceptor(Digest ctx) { return new ClientInterceptor() { @@ -273,7 +274,7 @@ public void register(SubContext context) { assertNotNull(attached); assertEquals(1, attached.seals().size()); final var extracted = attached.seals().get(0); - assertTrue(extracted instanceof Seal.DigestSeal); + assertTrue(extracted instanceof 
Seal.EventSeal); // assertEquals(1, attached.endorsements().size()); } diff --git a/model/src/test/java/com/salesforce/apollo/model/stereotomy/ShardedKERLTest.java b/model/src/test/java/com/salesforce/apollo/model/stereotomy/ShardedKERLTest.java index e6c94f8a3a..2334f18e4c 100644 --- a/model/src/test/java/com/salesforce/apollo/model/stereotomy/ShardedKERLTest.java +++ b/model/src/test/java/com/salesforce/apollo/model/stereotomy/ShardedKERLTest.java @@ -14,7 +14,7 @@ import com.salesforce.apollo.stereotomy.*; import com.salesforce.apollo.stereotomy.event.EstablishmentEvent; import com.salesforce.apollo.stereotomy.event.KeyEvent; -import com.salesforce.apollo.stereotomy.event.Seal.CoordinatesSeal; +import com.salesforce.apollo.stereotomy.event.Seal; import com.salesforce.apollo.stereotomy.event.Seal.DigestSeal; import com.salesforce.apollo.stereotomy.identifier.Identifier; import com.salesforce.apollo.stereotomy.identifier.SelfAddressingIdentifier; @@ -43,7 +43,7 @@ public class ShardedKERLTest { @BeforeEach public void before() throws Exception { secureRandom = SecureRandom.getInstance("SHA1PRNG"); - secureRandom.setSeed(new byte[]{0}); + secureRandom.setSeed(new byte[] { 0 }); } @Test @@ -52,8 +52,7 @@ public void delegated() throws Exception { Emulator emmy = new Emulator(); emmy.start(Domain.boostrapMigration()); - ShardedKERL kerl = new ShardedKERL(emmy.newConnector(), emmy.getMutator(), timeout, - DigestAlgorithm.DEFAULT); + ShardedKERL kerl = new ShardedKERL(emmy.newConnector(), emmy.getMutator(), timeout, DigestAlgorithm.DEFAULT); var ks = new MemKeyStore(); Stereotomy controller = new StereotomyImpl(ks, kerl, secureRandom); @@ -68,7 +67,7 @@ public void delegated() throws Exception { var sap = (SelfAddressingIdentifier) identifier.getIdentifier(); assertEquals(DigestAlgorithm.DEFAULT, sap.getDigest().getAlgorithm()); assertEquals("092126af01f80ca28e7a99bbdce229c029be3bbfcb791e29ccb7a64e8019a36f", - Hex.hex(sap.getDigest().getBytes())); + Hex.hex(sap.getDigest().getBytes())); assertEquals(1, ((Unweighted) identifier.getSigningThreshold()).getThreshold()); @@ -76,7 +75,8 @@ public void delegated() throws Exception { assertEquals(1, identifier.getKeys().size()); assertNotNull(identifier.getKeys().get(0)); - EstablishmentEvent lastEstablishmentEvent = (EstablishmentEvent) kerl.getKeyEvent(identifier.getLastEstablishmentEvent()); + EstablishmentEvent lastEstablishmentEvent = (EstablishmentEvent) kerl.getKeyEvent( + identifier.getLastEstablishmentEvent()); assertEquals(identifier.getKeys().get(0), lastEstablishmentEvent.getKeys().get(0)); var keyCoordinates = KeyCoordinates.of(lastEstablishmentEvent, 0); @@ -89,10 +89,10 @@ public void delegated() throws Exception { var keyStoreNextKeyPair = ks.getNextKey(keyCoordinates); assertTrue(keyStoreNextKeyPair.isPresent()); var expectedNextKeys = KeyConfigurationDigester.digest(SigningThreshold.unweighted(1), - List.of(keyStoreNextKeyPair.get().getPublic()), - identifier.getNextKeyConfigurationDigest() - .get() - .getAlgorithm()); + List.of(keyStoreNextKeyPair.get().getPublic()), + identifier.getNextKeyConfigurationDigest() + .get() + .getAlgorithm()); assertEquals(expectedNextKeys, identifier.getNextKeyConfigurationDigest().get()); // witnesses @@ -116,8 +116,7 @@ public void delegated() throws Exception { var digest = DigestAlgorithm.BLAKE3_256.digest("digest seal".getBytes()); var event = EventCoordinates.of(kerl.getKeyEvent(identifier.getLastEstablishmentEvent())); - var seals = List.of(DigestSeal.construct(digest), 
DigestSeal.construct(digest), - CoordinatesSeal.construct(event)); + var seals = List.of(DigestSeal.construct(digest), DigestSeal.construct(digest), Seal.construct(event)); identifier.rotate(); identifier.seal(InteractionSpecification.newBuilder()); @@ -131,8 +130,7 @@ public void direct() throws Exception { Emulator emmy = new Emulator(); emmy.start(Domain.boostrapMigration()); - ShardedKERL kerl = new ShardedKERL(emmy.newConnector(), emmy.getMutator(), timeout, - DigestAlgorithm.DEFAULT); + ShardedKERL kerl = new ShardedKERL(emmy.newConnector(), emmy.getMutator(), timeout, DigestAlgorithm.DEFAULT); Stereotomy controller = new StereotomyImpl(new MemKeyStore(), kerl, secureRandom); @@ -140,8 +138,7 @@ public void direct() throws Exception { var digest = DigestAlgorithm.BLAKE3_256.digest("digest seal".getBytes()); var event = EventCoordinates.of(kerl.getKeyEvent(i.getLastEstablishmentEvent())); - var seals = List.of(DigestSeal.construct(digest), DigestSeal.construct(digest), - CoordinatesSeal.construct(event)); + var seals = List.of(DigestSeal.construct(digest), DigestSeal.construct(digest), Seal.construct(event)); i.rotate(); i.seal(InteractionSpecification.newBuilder()); diff --git a/model/src/test/resources/logback-test.xml b/model/src/test/resources/logback-test.xml index 32e40bb942..ce3bc45184 100644 --- a/model/src/test/resources/logback-test.xml +++ b/model/src/test/resources/logback-test.xml @@ -2,63 +2,71 @@ - + %d{mm:ss.SSS} [%thread] %-5level %logger{0} - - %msg%n + %msg%n - + - + - + - + - + - + - + - + + + + + + + + + - + - + - + - + - \ No newline at end of file + diff --git a/pom.xml b/pom.xml index 6643c721b9..4ff5f455c6 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,6 @@ model delphinius schemas - demo stereotomy-services thoth liquibase-deterministic @@ -31,6 +30,7 @@ domain-epoll domain-kqueue domain-sockets + leyden @@ -41,10 +41,10 @@ 3.17.2 1.72 1.4.12 - 1.58.0 - 3.24.3 + 1.60.1 + 3.25.1 4.8.0 - 4.1.93.Final + 4.1.100.Final 0.9.27 5.9.1 2.0.3 @@ -174,6 +174,11 @@ cryptography ${project.version} + + com.salesforce.apollo + leyden + ${project.version} + com.salesforce.apollo isolates @@ -331,11 +336,6 @@ - - org.checkerframework - checker-qual - 3.26.0 - io.micrometer micrometer-core @@ -526,6 +526,11 @@ ${os.detected.classifier} provided + + com.macasaet.fernet + fernet-java8 + 1.4.2 + org.graalvm.sdk graal-sdk @@ -635,10 +640,6 @@ org.apache.maven.plugins maven-surefire-plugin - - 1 - false - org.apache.maven.plugins @@ -780,9 +781,18 @@ org.apache.maven.plugins maven-source-plugin 3.2.1 - - false - + + + attach-sources + verify + + jar-no-fork + + + true + + + org.apache.maven.plugins diff --git a/protocols/README.md b/protocols/README.md index 7a155c70d5..65e801de8f 100644 --- a/protocols/README.md +++ b/protocols/README.md @@ -1,10 +1,16 @@ # Apollo Protocols -Apollo uses GRPC for all network communication. The protols implemented by Apollo are quite simple, consisting of largely bytes, so the serialization speed and efficiency isn't particularly an issue. +Apollo uses GRPC for all network communication. The protocols implemented by Apollo are quite simple, consisting of +largely bytes, so the serialization speed and efficiency isn't particularly an issue. -This module implements the core communications abstraction used throughout the Apollo stack. This module also provides an MTLS _Server_ and outbound communications client used by other dependent modules. +This module implements the core communications abstraction used throughout the Apollo stack. 
This module also provides +an MTLS _Server_ and outbound communications client used by other dependent modules. ## Status -As this is a base module reused in all layers of the stack, this gets a good workout. The functional testing here is minimal (ha!) but the MTLS layer machinery gets a very good functional workout in the _Fireflies_ module, where we run a 100 member group with Fireflies. -GRPC naturally multiplexes client and server communications, so all communications go through one connection now, rather than one per protocol. +As this is a base module reused in all layers of the stack, this gets a good workout. The functional testing here is +minimal (ha!) but the MTLS layer machinery gets a very good functional workout in the _Fireflies_ module, where we run a +100 member group with Fireflies. + +GRPC naturally multiplexes client and server communications, so all communications go through one connection now, rather +than one per protocol. diff --git a/protocols/pom.xml b/protocols/pom.xml index 9a0f09ee47..4bfabe2353 100644 --- a/protocols/pom.xml +++ b/protocols/pom.xml @@ -37,6 +37,12 @@ io.grpc grpc-netty + + io.netty + netty-transport-native-unix-common + ${os.detected.classifier} + provided + io.perfmark perfmark-api @@ -114,19 +120,12 @@ - - io.netty - netty-transport-native-unix-common - ${netty.version} - ${os.detected.classifier} - test - io.netty netty-transport-native-kqueue ${netty.version} ${os.detected.classifier} - optional + provided @@ -138,19 +137,12 @@ - - io.netty - netty-transport-native-unix-common - ${netty.version} - ${os.detected.classifier} - test - io.netty netty-transport-native-epoll ${netty.version} ${os.detected.classifier} - optional + provided diff --git a/protocols/src/test/java/com/netflix/concurrency/limits/limiter/LifoBlockingLimiterTest.java b/protocols/src/test/java/com/netflix/concurrency/limits/limiter/LifoBlockingLimiterTest.java index 24679c4228..157b2ef249 100644 --- a/protocols/src/test/java/com/netflix/concurrency/limits/limiter/LifoBlockingLimiterTest.java +++ b/protocols/src/test/java/com/netflix/concurrency/limits/limiter/LifoBlockingLimiterTest.java @@ -1,32 +1,25 @@ package com.netflix.concurrency.limits.limiter; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; +import com.netflix.concurrency.limits.Limiter; +import com.netflix.concurrency.limits.limit.SettableLimit; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.util.Arrays; import java.util.List; import java.util.Optional; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.Executor; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; +import java.util.concurrent.*; import java.util.stream.Collectors; import java.util.stream.IntStream; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -import com.netflix.concurrency.limits.Limiter; -import com.netflix.concurrency.limits.limit.SettableLimit; +import static org.junit.jupiter.api.Assertions.*; public class LifoBlockingLimiterTest { - private LifoBlockingLimiter blockingLimiter; - private final Executor executor = Executors.newVirtualThreadPerTaskExecutor(); - private SettableLimit limit; - private SimpleLimiter simpleLimiter; + private final Executor executor = Executors.newVirtualThreadPerTaskExecutor(); + private 
LifoBlockingLimiter blockingLimiter; + private SettableLimit limit; + private SimpleLimiter simpleLimiter; @Test public void adaptWhenLimitDecreases() { @@ -125,24 +118,27 @@ public void verifyFifoOrder() { // Kick off 5 requests with a small delay to ensure futures are created in the // correct order List values = new CopyOnWriteArrayList<>(); - List> futures = IntStream.range(0, 5).peek(i -> { - try { - TimeUnit.MILLISECONDS.sleep(50); - } catch (InterruptedException e) { - } - }).mapToObj(i -> CompletableFuture.supplyAsync(() -> { - Optional listener = blockingLimiter.acquire(i + 4); - if (!listener.isPresent()) { - return -1; - } - try { - return i; - } finally { - listener.get().onSuccess(); - } - }, executor)) - .peek(future -> future.whenComplete((value, - error) -> values.add(value))) + List> futures = IntStream.range(0, 5) + .peek(i -> { + try { + TimeUnit.MILLISECONDS.sleep(50); + } catch (InterruptedException e) { + } + }) + .mapToObj(i -> CompletableFuture.supplyAsync(() -> { + Optional listener = blockingLimiter.acquire( + i + 4); + if (!listener.isPresent()) { + return -1; + } + try { + return i; + } finally { + listener.get().onSuccess(); + } + }, executor)) + .peek(future -> future.whenComplete( + (value, error) -> values.add(value))) .collect(Collectors.toList()); // Release the first batch of tokens diff --git a/protocols/src/test/java/com/salesforce/apollo/comm/grpc/DomainSocketTest.java b/protocols/src/test/java/com/salesforce/apollo/comm/grpc/DomainSocketTest.java index 831ed3f4be..8d65d8e936 100644 --- a/protocols/src/test/java/com/salesforce/apollo/comm/grpc/DomainSocketTest.java +++ b/protocols/src/test/java/com/salesforce/apollo/comm/grpc/DomainSocketTest.java @@ -8,9 +8,9 @@ import com.google.common.primitives.Ints; import com.google.protobuf.Any; -import com.salesfoce.apollo.test.proto.PeerCreds; -import com.salesfoce.apollo.test.proto.TestItGrpc; -import com.salesfoce.apollo.test.proto.TestItGrpc.TestItImplBase; +import com.salesforce.apollo.test.proto.PeerCreds; +import com.salesforce.apollo.test.proto.TestItGrpc; +import com.salesforce.apollo.test.proto.TestItGrpc.TestItImplBase; import io.grpc.ManagedChannel; import io.grpc.Status; import io.grpc.StatusRuntimeException; diff --git a/protocols/src/test/java/com/salesforce/apollo/comm/grpc/MtlsClient.java b/protocols/src/test/java/com/salesforce/apollo/comm/grpc/MtlsClient.java index b3519c9954..d3bc3f599d 100644 --- a/protocols/src/test/java/com/salesforce/apollo/comm/grpc/MtlsClient.java +++ b/protocols/src/test/java/com/salesforce/apollo/comm/grpc/MtlsClient.java @@ -6,31 +6,30 @@ */ package com.salesforce.apollo.comm.grpc; -import static com.salesforce.apollo.comm.grpc.MtlsServer.forClient; - -import java.net.SocketAddress; -import java.security.PrivateKey; -import java.security.cert.X509Certificate; -import java.util.concurrent.Executor; - import com.netflix.concurrency.limits.Limiter; import com.netflix.concurrency.limits.grpc.client.ConcurrencyLimitClientInterceptor; import com.netflix.concurrency.limits.grpc.client.GrpcClientLimiterBuilder; import com.netflix.concurrency.limits.grpc.client.GrpcClientRequestContext; import com.salesforce.apollo.cryptography.ssl.CertificateValidator; - import io.grpc.ManagedChannel; import io.grpc.Status; import io.grpc.netty.NettyChannelBuilder; import io.netty.handler.ssl.ClientAuth; +import java.net.SocketAddress; +import java.security.PrivateKey; +import java.security.cert.X509Certificate; +import java.util.concurrent.Executor; +import java.util.concurrent.Executors; 
+ +import static com.salesforce.apollo.comm.grpc.MtlsServer.forClient; + /** * @author hal.hildebrand - * */ public class MtlsClient { - - private final ManagedChannel channel; + private static final Executor exec = Executors.newVirtualThreadPerTaskExecutor(); + private final ManagedChannel channel; public MtlsClient(SocketAddress address, ClientAuth clientAuth, String alias, ClientContextSupplier supplier, CertificateValidator validator, Executor exec) { @@ -40,20 +39,22 @@ public MtlsClient(SocketAddress address, ClientAuth clientAuth, String alias, Cl .executor(exec) .sslContext(supplier.forClient(clientAuth, alias, validator, MtlsServer.TL_SV1_3)) .intercept(new ConcurrencyLimitClientInterceptor(limiter, - () -> Status.RESOURCE_EXHAUSTED.withDescription("Client side concurrency limit exceeded"))) + () -> Status.RESOURCE_EXHAUSTED.withDescription( + "Client side concurrency limit exceeded"))) .build(); } public MtlsClient(SocketAddress address, ClientAuth clientAuth, String alias, X509Certificate certificate, - PrivateKey privateKey, CertificateValidator validator, Executor exec) { + PrivateKey privateKey, CertificateValidator validator) { Limiter limiter = new GrpcClientLimiterBuilder().blockOnLimit(false).build(); channel = NettyChannelBuilder.forAddress(address) .executor(exec) .sslContext(forClient(clientAuth, alias, certificate, privateKey, validator)) .intercept(new ConcurrencyLimitClientInterceptor(limiter, - () -> Status.RESOURCE_EXHAUSTED.withDescription("Client side concurrency limit exceeded"))) + () -> Status.RESOURCE_EXHAUSTED.withDescription( + "Client side concurrency limit exceeded"))) .build(); } diff --git a/protocols/src/test/java/com/salesforce/apollo/comm/grpc/MtlsServer.java b/protocols/src/test/java/com/salesforce/apollo/comm/grpc/MtlsServer.java index 864d485ddd..dd6900baf9 100644 --- a/protocols/src/test/java/com/salesforce/apollo/comm/grpc/MtlsServer.java +++ b/protocols/src/test/java/com/salesforce/apollo/comm/grpc/MtlsServer.java @@ -6,19 +6,6 @@ */ package com.salesforce.apollo.comm.grpc; -import java.io.IOException; -import java.net.SocketAddress; -import java.security.PrivateKey; -import java.security.Provider; -import java.security.Security; -import java.security.cert.X509Certificate; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.Executor; - -import javax.net.ssl.SSLException; -import javax.net.ssl.SSLPeerUnverifiedException; -import javax.net.ssl.SSLSession; - import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; @@ -28,60 +15,75 @@ import com.salesforce.apollo.cryptography.ssl.NodeTrustManagerFactory; import com.salesforce.apollo.cryptography.ssl.TlsInterceptor; import com.salesforce.apollo.protocols.ClientIdentity; - -import io.grpc.BindableService; -import io.grpc.Context; -import io.grpc.Metadata; -import io.grpc.Server; -import io.grpc.ServerCall; -import io.grpc.ServerCallHandler; -import io.grpc.ServerInterceptor; +import io.grpc.*; import io.grpc.netty.GrpcSslContexts; import io.grpc.netty.NettyServerBuilder; import io.grpc.util.MutableHandlerRegistry; import io.netty.channel.ChannelOption; -import io.netty.handler.ssl.ApplicationProtocolConfig; +import io.netty.handler.ssl.*; import io.netty.handler.ssl.ApplicationProtocolConfig.Protocol; import io.netty.handler.ssl.ApplicationProtocolConfig.SelectedListenerFailureBehavior; import io.netty.handler.ssl.ApplicationProtocolConfig.SelectorFailureBehavior; -import 
io.netty.handler.ssl.ApplicationProtocolNames; -import io.netty.handler.ssl.ClientAuth; -import io.netty.handler.ssl.SslContext; -import io.netty.handler.ssl.SslContextBuilder; -import io.netty.handler.ssl.SslProvider; + +import javax.net.ssl.SSLException; +import javax.net.ssl.SSLPeerUnverifiedException; +import javax.net.ssl.SSLSession; +import java.io.IOException; +import java.net.SocketAddress; +import java.security.PrivateKey; +import java.security.Provider; +import java.security.Security; +import java.security.cert.X509Certificate; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Executors; /** * @author hal.hildebrand - * */ public class MtlsServer implements ClientIdentity { - /** - * Currently grpc-java doesn't return compressed responses, even if the client - * has sent a compressed payload. This turns on gzip compression for all - * responses. - */ - public static class EnableCompressionInterceptor implements ServerInterceptor { - public final static EnableCompressionInterceptor SINGLETON = new EnableCompressionInterceptor(); + public static final String TL_SV1_3 = "TLSv1.3"; + private static final Provider PROVIDER_JSSE = Security.getProvider("SunJSSE"); + private final LoadingCache cachedMembership; + private final TlsInterceptor interceptor; + private final MutableHandlerRegistry registry; + private final Server server; + private final Context.Key sslSessionContext = Context.key("SSLSession"); - @Override - public io.grpc.ServerCall.Listener interceptCall(ServerCall call, - Metadata headers, - ServerCallHandler next) { - call.setCompression("gzip"); - return next.startCall(call, headers); - } + public MtlsServer(SocketAddress address, ClientAuth clientAuth, String alias, ServerContextSupplier supplier, + CertificateValidator validator, MutableHandlerRegistry registry) { + this.registry = registry; + interceptor = new TlsInterceptor(sslSessionContext); + cachedMembership = CacheBuilder.newBuilder().build(new CacheLoader() { + @Override + public Digest load(X509Certificate key) throws Exception { + return supplier.getMemberId(key); + } + }); + NettyServerBuilder builder = NettyServerBuilder.forAddress(address) + .withOption(ChannelOption.SO_REUSEADDR, true) + .sslContext( + supplier.forServer(clientAuth, alias, validator, PROVIDER_JSSE)) + .fallbackHandlerRegistry(registry) + .withChildOption(ChannelOption.TCP_NODELAY, true) + .intercept(interceptor) + .intercept(EnableCompressionInterceptor.SINGLETON); + builder.executor(Executors.newVirtualThreadPerTaskExecutor()); + server = builder.build(); + Runtime.getRuntime().addShutdownHook(new Thread() { + @Override + public void run() { + server.shutdown(); + } + }); } - public static final String TL_SV1_3 = "TLSv1.3"; - - private static final Provider PROVIDER_JSSE = Security.getProvider("SunJSSE"); - public static SslContext forClient(ClientAuth clientAuth, String alias, X509Certificate certificate, PrivateKey privateKey, CertificateValidator validator) { SslContextBuilder builder = SslContextBuilder.forClient() .sslContextProvider(PROVIDER_JSSE) - .keyManager(new NodeKeyManagerFactory(alias, certificate, - privateKey, PROVIDER_JSSE)); + .keyManager( + new NodeKeyManagerFactory(alias, certificate, privateKey, + PROVIDER_JSSE)); GrpcSslContexts.configure(builder, SslProvider.JDK); builder.protocols(TL_SV1_3) .sslContextProvider(PROVIDER_JSSE) @@ -108,8 +110,8 @@ public static SslContext forClient(ClientAuth clientAuth, String alias, X509Cert public static SslContext forServer(ClientAuth clientAuth, 
String alias, X509Certificate certificate, PrivateKey privateKey, CertificateValidator validator) { - SslContextBuilder builder = SslContextBuilder.forServer(new NodeKeyManagerFactory(alias, certificate, - privateKey, PROVIDER_JSSE)); + SslContextBuilder builder = SslContextBuilder.forServer( + new NodeKeyManagerFactory(alias, certificate, privateKey, PROVIDER_JSSE)); GrpcSslContexts.configure(builder, SslProvider.JDK); builder.protocols(TL_SV1_3) .sslContextProvider(PROVIDER_JSSE) @@ -134,41 +136,6 @@ public static SslContext forServer(ClientAuth clientAuth, String alias, X509Cert } - private final LoadingCache cachedMembership; - private final TlsInterceptor interceptor; - private final MutableHandlerRegistry registry; - private final Server server; - private final Context.Key sslSessionContext = Context.key("SSLSession"); - - public MtlsServer(SocketAddress address, ClientAuth clientAuth, String alias, ServerContextSupplier supplier, - CertificateValidator validator, MutableHandlerRegistry registry, Executor executor) { - this.registry = registry; - interceptor = new TlsInterceptor(sslSessionContext); - cachedMembership = CacheBuilder.newBuilder().build(new CacheLoader() { - @Override - public Digest load(X509Certificate key) throws Exception { - return supplier.getMemberId(key); - } - }); - NettyServerBuilder builder = NettyServerBuilder.forAddress(address) - .executor(executor) - .withOption(ChannelOption.SO_REUSEADDR, true) - .sslContext(supplier.forServer(clientAuth, alias, validator, - PROVIDER_JSSE)) - .fallbackHandlerRegistry(registry) - .withChildOption(ChannelOption.TCP_NODELAY, true) - .intercept(interceptor) - .intercept(EnableCompressionInterceptor.SINGLETON); - builder.executor(executor); - server = builder.build(); - Runtime.getRuntime().addShutdownHook(new Thread() { - @Override - public void run() { - server.shutdown(); - } - }); - } - public void bind(BindableService service) { registry.addService(service); } @@ -197,4 +164,20 @@ private X509Certificate getCert() { throw new IllegalStateException(e); } } + + /** + * Currently grpc-java doesn't return compressed responses, even if the client has sent a compressed payload. This + * turns on gzip compression for all responses. 
+ */ + public static class EnableCompressionInterceptor implements ServerInterceptor { + public final static EnableCompressionInterceptor SINGLETON = new EnableCompressionInterceptor(); + + @Override + public io.grpc.ServerCall.Listener interceptCall(ServerCall call, + Metadata headers, + ServerCallHandler next) { + call.setCompression("gzip"); + return next.startCall(call, headers); + } + } } diff --git a/protocols/src/test/java/com/salesforce/apollo/comm/grpc/TestMtls.java b/protocols/src/test/java/com/salesforce/apollo/comm/grpc/TestMtls.java index f71b3d2aba..7791488738 100644 --- a/protocols/src/test/java/com/salesforce/apollo/comm/grpc/TestMtls.java +++ b/protocols/src/test/java/com/salesforce/apollo/comm/grpc/TestMtls.java @@ -6,39 +6,35 @@ */ package com.salesforce.apollo.comm.grpc; -import static org.junit.jupiter.api.Assertions.assertNotNull; - -import java.net.InetAddress; -import java.net.InetSocketAddress; -import java.security.KeyPair; -import java.security.Provider; -import java.security.cert.CertificateException; -import java.security.cert.X509Certificate; -import java.time.Instant; -import java.util.Collections; -import java.util.concurrent.ForkJoinPool; - -import org.junit.jupiter.api.Test; - import com.google.protobuf.Any; -import com.salesfoce.apollo.test.proto.TestItGrpc; -import com.salesfoce.apollo.test.proto.TestItGrpc.TestItImplBase; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.cryptography.SignatureAlgorithm; import com.salesforce.apollo.cryptography.cert.CertificateWithPrivateKey; import com.salesforce.apollo.cryptography.cert.Certificates; import com.salesforce.apollo.cryptography.ssl.CertificateValidator; +import com.salesforce.apollo.test.proto.TestItGrpc; +import com.salesforce.apollo.test.proto.TestItGrpc.TestItImplBase; import com.salesforce.apollo.utils.Utils; - import io.grpc.stub.StreamObserver; import io.grpc.util.MutableHandlerRegistry; import io.netty.handler.ssl.ClientAuth; import io.netty.handler.ssl.SslContext; +import org.junit.jupiter.api.Test; + +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.security.KeyPair; +import java.security.Provider; +import java.security.cert.CertificateException; +import java.security.cert.X509Certificate; +import java.time.Instant; +import java.util.Collections; + +import static org.junit.jupiter.api.Assertions.assertNotNull; /** * @author hal.hildebrand - * */ public class TestMtls { @@ -47,10 +43,9 @@ public static CertificateWithPrivateKey getMember(Digest id) { var notBefore = Instant.now(); var notAfter = Instant.now().plusSeconds(10_000); String localhost = InetAddress.getLoopbackAddress().getHostName(); - X509Certificate generated = Certificates.selfSign(false, - Utils.encode(id, localhost, Utils.allocatePort(), - keyPair.getPublic()), - keyPair, notBefore, notAfter, Collections.emptyList()); + X509Certificate generated = Certificates.selfSign(false, Utils.encode(id, localhost, Utils.allocatePort(), + keyPair.getPublic()), keyPair, notBefore, + notAfter, Collections.emptyList()); return new CertificateWithPrivateKey(generated, keyPair.getPrivate()); } @@ -85,7 +80,7 @@ private MtlsClient client(InetSocketAddress serverAddress) { CertificateWithPrivateKey clientCert = clientIdentity(); MtlsClient client = new MtlsClient(serverAddress, ClientAuth.REQUIRE, "foo", clientCert.getX509Certificate(), - clientCert.getPrivateKey(), validator(), r -> r.run()); + clientCert.getPrivateKey(), 
validator()); return client; } @@ -120,7 +115,7 @@ public SslContext forServer(ClientAuth clientAuth, String alias, CertificateVali public Digest getMemberId(X509Certificate key) { return Digest.NONE; } - }, validator(), new MutableHandlerRegistry(), ForkJoinPool.commonPool()); + }, validator(), new MutableHandlerRegistry()); return server; } diff --git a/schemas/src/main/resources/model/model.xml b/schemas/src/main/resources/model/model.xml index 8a51037d13..1a6cad71fc 100644 --- a/schemas/src/main/resources/model/model.xml +++ b/schemas/src/main/resources/model/model.xml @@ -1,24 +1,10 @@ + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xmlns="http://www.liquibase.org/xml/ns/dbchangelog" + xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-4.4.xsd"> - - create schema if not exists apollo_kernel - - - - - - - - - - \ No newline at end of file + + create schema if not exists apollo_kernel + + diff --git a/sql-state/src/main/java/com/salesforce/apollo/state/Emulator.java b/sql-state/src/main/java/com/salesforce/apollo/state/Emulator.java index 0b76a9e375..944f5d7383 100644 --- a/sql-state/src/main/java/com/salesforce/apollo/state/Emulator.java +++ b/sql-state/src/main/java/com/salesforce/apollo/state/Emulator.java @@ -6,9 +6,9 @@ */ package com.salesforce.apollo.state; -import com.salesfoce.apollo.choam.proto.*; -import com.salesfoce.apollo.choam.proto.SubmitResult.Result; -import com.salesfoce.apollo.state.proto.Txn; +import com.salesforce.apollo.choam.proto.*; +import com.salesforce.apollo.choam.proto.SubmitResult.Result; +import com.salesforce.apollo.state.proto.Txn; import com.salesforce.apollo.choam.CHOAM; import com.salesforce.apollo.choam.CHOAM.TransactionExecutor; import com.salesforce.apollo.choam.Parameters; diff --git a/sql-state/src/main/java/com/salesforce/apollo/state/Mutator.java b/sql-state/src/main/java/com/salesforce/apollo/state/Mutator.java index 32d96d05b3..3638cb89ba 100644 --- a/sql-state/src/main/java/com/salesforce/apollo/state/Mutator.java +++ b/sql-state/src/main/java/com/salesforce/apollo/state/Mutator.java @@ -8,8 +8,8 @@ import com.google.protobuf.ByteString; import com.google.protobuf.Message; -import com.salesfoce.apollo.state.proto.*; -import com.salesfoce.apollo.state.proto.ChangeLog.Builder; +import com.salesforce.apollo.state.proto.*; +import com.salesforce.apollo.state.proto.ChangeLog.Builder; import com.salesforce.apollo.choam.Session; import com.salesforce.apollo.choam.support.InvalidTransaction; import com.salesforce.apollo.state.SqlStateMachine.CallResult; diff --git a/sql-state/src/main/java/com/salesforce/apollo/state/SqlStateMachine.java b/sql-state/src/main/java/com/salesforce/apollo/state/SqlStateMachine.java index 3b8fd6aa20..66eaf16484 100644 --- a/sql-state/src/main/java/com/salesforce/apollo/state/SqlStateMachine.java +++ b/sql-state/src/main/java/com/salesforce/apollo/state/SqlStateMachine.java @@ -10,9 +10,9 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.protobuf.InvalidProtocolBufferException; -import com.salesfoce.apollo.choam.proto.Transaction; -import com.salesfoce.apollo.state.proto.Statement; -import com.salesfoce.apollo.state.proto.*; +import com.salesforce.apollo.choam.proto.Transaction; +import com.salesforce.apollo.state.proto.Statement; +import com.salesforce.apollo.state.proto.*; import com.salesforce.apollo.choam.CHOAM.TransactionExecutor; import 
com.salesforce.apollo.choam.Session; import com.salesforce.apollo.choam.support.CheckpointState; diff --git a/sql-state/src/test/java/com/salesforce/apollo/state/AbstractLifecycleTest.java b/sql-state/src/test/java/com/salesforce/apollo/state/AbstractLifecycleTest.java index fae0362d83..4e22250d2e 100644 --- a/sql-state/src/test/java/com/salesforce/apollo/state/AbstractLifecycleTest.java +++ b/sql-state/src/test/java/com/salesforce/apollo/state/AbstractLifecycleTest.java @@ -6,8 +6,8 @@ */ package com.salesforce.apollo.state; -import com.salesfoce.apollo.choam.proto.Transaction; -import com.salesfoce.apollo.state.proto.Txn; +import com.salesforce.apollo.choam.proto.Transaction; +import com.salesforce.apollo.state.proto.Txn; import com.salesforce.apollo.archipelago.LocalServer; import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.archipelago.ServerConnectionCache; diff --git a/sql-state/src/test/java/com/salesforce/apollo/state/CHOAMTest.java b/sql-state/src/test/java/com/salesforce/apollo/state/CHOAMTest.java index 881d094dc2..6f69f1621a 100644 --- a/sql-state/src/test/java/com/salesforce/apollo/state/CHOAMTest.java +++ b/sql-state/src/test/java/com/salesforce/apollo/state/CHOAMTest.java @@ -8,8 +8,8 @@ import com.codahale.metrics.ConsoleReporter; import com.codahale.metrics.MetricRegistry; -import com.salesfoce.apollo.choam.proto.Transaction; -import com.salesfoce.apollo.state.proto.Txn; +import com.salesforce.apollo.choam.proto.Transaction; +import com.salesforce.apollo.state.proto.Txn; import com.salesforce.apollo.archipelago.LocalServer; import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.archipelago.ServerConnectionCache; diff --git a/sql-state/src/test/java/com/salesforce/apollo/state/MigrationTest.java b/sql-state/src/test/java/com/salesforce/apollo/state/MigrationTest.java index 08e6cceeaa..d53229b32b 100644 --- a/sql-state/src/test/java/com/salesforce/apollo/state/MigrationTest.java +++ b/sql-state/src/test/java/com/salesforce/apollo/state/MigrationTest.java @@ -27,10 +27,10 @@ import org.junit.jupiter.api.Test; import com.google.protobuf.Message; -import com.salesfoce.apollo.choam.proto.Transaction; -import com.salesfoce.apollo.state.proto.ChangeLog; -import com.salesfoce.apollo.state.proto.Migration; -import com.salesfoce.apollo.state.proto.Txn; +import com.salesforce.apollo.choam.proto.Transaction; +import com.salesforce.apollo.state.proto.ChangeLog; +import com.salesforce.apollo.state.proto.Migration; +import com.salesforce.apollo.state.proto.Txn; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; @@ -38,7 +38,6 @@ /** * @author hal.hildebrand - * */ public class MigrationTest { @@ -53,7 +52,8 @@ public static List initializeBookSchema() { var list = new ArrayList(); list.add(Txn.newBuilder() .setMigration(MigrationTest.bookSchemaMigration()) - .setBatch(batch("create table books (id int, title varchar(50), author varchar(50), price float, qty int, primary key (id))")) + .setBatch(batch( + "create table books (id int, title varchar(50), author varchar(50), price float, qty int, primary key (id))")) .build()); return list; } @@ -67,22 +67,20 @@ public void rollback() throws Exception { Migration migration = Migration.newBuilder().setTag("test-1").build(); CompletableFuture success = new CompletableFuture<>(); - executor.execute(0, Digest.NONE, - Transaction.newBuilder() - .setContent(Txn.newBuilder().setMigration(migration).build().toByteString()) - .build(), - success, 
r -> r.run()); + executor.execute(0, Digest.NONE, Transaction.newBuilder() + .setContent( + Txn.newBuilder().setMigration(migration).build().toByteString()) + .build(), success, r -> r.run()); executor.beginBlock(ULong.valueOf(1), DigestAlgorithm.DEFAULT.getOrigin().prefix("voo")); migration = Migration.newBuilder().setUpdate(Mutator.changeLog(BOOK_RESOURCE_PATH, BOOK_SCHEMA_ROOT)).build(); success = new CompletableFuture<>(); - executor.execute(0, Digest.NONE, - Transaction.newBuilder() - .setContent(Txn.newBuilder().setMigration(migration).build().toByteString()) - .build(), - success, r -> r.run()); + executor.execute(0, Digest.NONE, Transaction.newBuilder() + .setContent( + Txn.newBuilder().setMigration(migration).build().toByteString()) + .build(), success, r -> r.run()); success.get(1, TimeUnit.SECONDS); @@ -93,11 +91,12 @@ public void rollback() throws Exception { assertFalse(cb.next(), "Should not exist"); Transaction.Builder builder = Transaction.newBuilder(); builder.setContent(Txn.newBuilder() - .setBatch(batch("insert into test.books values (1001, 'Java for dummies', 'Tan Ah Teck', 11.11, 11)", - "insert into test.books values (1002, 'More Java for dummies', 'Tan Ah Teck', 22.22, 22)", - "insert into test.books values (1003, 'More Java for more dummies', 'Mohammad Ali', 33.33, 33)", - "insert into test.books values (1004, 'A Cup of Java', 'Kumar', 44.44, 44)", - "insert into test.books values (1005, 'A Teaspoon of Java', 'Kevin Jones', 55.55, 55)")) + .setBatch(batch( + "insert into test.books values (1001, 'Java for dummies', 'Tan Ah Teck', 11.11, 11)", + "insert into test.books values (1002, 'More Java for dummies', 'Tan Ah Teck', 22.22, 22)", + "insert into test.books values (1003, 'More Java for more dummies', 'Mohammad Ali', 33.33, 33)", + "insert into test.books values (1004, 'A Cup of Java', 'Kumar', 44.44, 44)", + "insert into test.books values (1005, 'A Teaspoon of Java', 'Kevin Jones', 55.55, 55)")) .build() .toByteString()); Transaction transaction = builder.build(); @@ -120,11 +119,10 @@ public void rollback() throws Exception { executor.beginBlock(ULong.valueOf(2), DigestAlgorithm.DEFAULT.getOrigin().prefix("foo")); - executor.execute(1, Digest.NONE, - Transaction.newBuilder() - .setContent(Txn.newBuilder().setMigration(migration).build().toByteString()) - .build(), - success, r -> r.run()); + executor.execute(1, Digest.NONE, Transaction.newBuilder() + .setContent( + Txn.newBuilder().setMigration(migration).build().toByteString()) + .build(), success, r -> r.run()); success.get(1, TimeUnit.SECONDS); @@ -149,11 +147,10 @@ public void update() throws Exception { .build(); CompletableFuture success = new CompletableFuture<>(); - executor.execute(0, Digest.NONE, - Transaction.newBuilder() - .setContent(Txn.newBuilder().setMigration(migration).build().toByteString()) - .build(), - success, r -> r.run()); + executor.execute(0, Digest.NONE, Transaction.newBuilder() + .setContent( + Txn.newBuilder().setMigration(migration).build().toByteString()) + .build(), success, r -> r.run()); success.get(1, TimeUnit.SECONDS); @@ -164,11 +161,12 @@ public void update() throws Exception { assertFalse(cb.next(), "Should not exist"); Transaction.Builder builder = Transaction.newBuilder(); builder.setContent(Txn.newBuilder() - .setBatch(batch("insert into test.books values (1001, 'Java for dummies', 'Tan Ah Teck', 11.11, 11)", - "insert into test.books values (1002, 'More Java for dummies', 'Tan Ah Teck', 22.22, 22)", - "insert into test.books values (1003, 'More Java for more dummies', 
'Mohammad Ali', 33.33, 33)", - "insert into test.books values (1004, 'A Cup of Java', 'Kumar', 44.44, 44)", - "insert into test.books values (1005, 'A Teaspoon of Java', 'Kevin Jones', 55.55, 55)")) + .setBatch(batch( + "insert into test.books values (1001, 'Java for dummies', 'Tan Ah Teck', 11.11, 11)", + "insert into test.books values (1002, 'More Java for dummies', 'Tan Ah Teck', 22.22, 22)", + "insert into test.books values (1003, 'More Java for more dummies', 'Mohammad Ali', 33.33, 33)", + "insert into test.books values (1004, 'A Cup of Java', 'Kumar', 44.44, 44)", + "insert into test.books values (1005, 'A Teaspoon of Java', 'Kevin Jones', 55.55, 55)")) .build() .toByteString()); Transaction transaction = builder.build(); diff --git a/sql-state/src/test/java/com/salesforce/apollo/state/MutatorTest.java b/sql-state/src/test/java/com/salesforce/apollo/state/MutatorTest.java index 1dad0db321..cd4d173c1b 100644 --- a/sql-state/src/test/java/com/salesforce/apollo/state/MutatorTest.java +++ b/sql-state/src/test/java/com/salesforce/apollo/state/MutatorTest.java @@ -22,9 +22,9 @@ import org.junit.jupiter.api.Test; -import com.salesfoce.apollo.choam.proto.Transaction; -import com.salesfoce.apollo.state.proto.Migration; -import com.salesfoce.apollo.state.proto.Txn; +import com.salesforce.apollo.choam.proto.Transaction; +import com.salesforce.apollo.state.proto.Migration; +import com.salesforce.apollo.state.proto.Txn; import com.salesforce.apollo.choam.Session; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; @@ -32,7 +32,6 @@ /** * @author hal.hildebrand - * */ public class MutatorTest { public static final Path MUT_RESOURCE_PATH = Path.of("src", "test", "resources", "mutator-testing"); @@ -57,21 +56,19 @@ public void smokin() throws Exception { .build(); CompletableFuture success = new CompletableFuture<>(); - executor.execute(0, Digest.NONE, - Transaction.newBuilder() - .setContent(Txn.newBuilder().setMigration(migration).build().toByteString()) - .build(), - success, r -> r.run()); + executor.execute(0, Digest.NONE, Transaction.newBuilder() + .setContent( + Txn.newBuilder().setMigration(migration).build().toByteString()) + .build(), success, r -> r.run()); success.get(1, TimeUnit.SECONDS); migration = Migration.newBuilder().setUpdate(Mutator.changeLog(MUT_RESOURCE_PATH, MUT_SCHEMA_ROOT)).build(); success = new CompletableFuture<>(); - executor.execute(1, Digest.NONE, - Transaction.newBuilder() - .setContent(Txn.newBuilder().setMigration(migration).build().toByteString()) - .build(), - success, r -> r.run()); + executor.execute(1, Digest.NONE, Transaction.newBuilder() + .setContent( + Txn.newBuilder().setMigration(migration).build().toByteString()) + .build(), success, r -> r.run()); success.get(1, TimeUnit.SECONDS); @@ -91,11 +88,9 @@ public void smokin() throws Exception { var call = mutator.call(sql, Collections.singletonList(JDBCType.INTEGER)); success = new CompletableFuture<>(); - executor.execute(1, Digest.NONE, - Transaction.newBuilder() - .setContent(Txn.newBuilder().setCall(call).build().toByteString()) - .build(), - success, r -> r.run()); + executor.execute(1, Digest.NONE, Transaction.newBuilder() + .setContent(Txn.newBuilder().setCall(call).build().toByteString()) + .build(), success, r -> r.run()); CallResult result = (CallResult) success.get(1, TimeUnit.SECONDS); assertNotNull(result); @@ -108,11 +103,10 @@ public void smokin() throws Exception { batch.build(); success = new CompletableFuture<>(); - 
executor.execute(2, Digest.NONE, - Transaction.newBuilder() - .setContent(Txn.newBuilder().setBatched(batch.build()).build().toByteString()) - .build(), - success, r -> r.run()); + executor.execute(2, Digest.NONE, Transaction.newBuilder() + .setContent( + Txn.newBuilder().setBatched(batch.build()).build().toByteString()) + .build(), success, r -> r.run()); var batchResult = (List) success.get(1, TimeUnit.SECONDS); assertNotNull(batchResult); diff --git a/sql-state/src/test/java/com/salesforce/apollo/state/ScriptTest.java b/sql-state/src/test/java/com/salesforce/apollo/state/ScriptTest.java index b0820f5446..3f472bf87a 100644 --- a/sql-state/src/test/java/com/salesforce/apollo/state/ScriptTest.java +++ b/sql-state/src/test/java/com/salesforce/apollo/state/ScriptTest.java @@ -6,8 +6,8 @@ */ package com.salesforce.apollo.state; -import com.salesfoce.apollo.choam.proto.Transaction; -import com.salesfoce.apollo.state.proto.Txn; +import com.salesforce.apollo.choam.proto.Transaction; +import com.salesforce.apollo.state.proto.Txn; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import org.junit.jupiter.api.Test; diff --git a/sql-state/src/test/java/com/salesforce/apollo/state/Transactioneer.java b/sql-state/src/test/java/com/salesforce/apollo/state/Transactioneer.java index 13a60756db..da88d2fd1b 100644 --- a/sql-state/src/test/java/com/salesforce/apollo/state/Transactioneer.java +++ b/sql-state/src/test/java/com/salesforce/apollo/state/Transactioneer.java @@ -6,8 +6,8 @@ */ package com.salesforce.apollo.state; -import com.salesfoce.apollo.state.proto.Txn; import com.salesforce.apollo.choam.support.InvalidTransaction; +import com.salesforce.apollo.state.proto.Txn; import com.salesforce.apollo.utils.Utils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -21,20 +21,20 @@ import java.util.function.Supplier; class Transactioneer { - private final static Random entropy = new Random(); - private final static Logger log = LoggerFactory.getLogger(Transactioneer.class); - private static final Executor executor = Executors.newVirtualThreadPerTaskExecutor(); - private final static ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1, - Thread.ofVirtual() - .factory()); - private final AtomicInteger completed = new AtomicInteger(); - private final CountDownLatch countdown; - private final AtomicReference inFlight = new AtomicReference<>(); - private final int max; - private final Mutator mutator; - private final Duration timeout; - private final Supplier update; - private final AtomicBoolean finished = new AtomicBoolean(); + private final static Random entropy = new Random(); + private final static Logger log = LoggerFactory.getLogger(Transactioneer.class); + private final static ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1, Thread.ofVirtual() + .factory()); + + private final Executor executor = Executors.newVirtualThreadPerTaskExecutor(); + private final AtomicInteger completed = new AtomicInteger(); + private final CountDownLatch countdown; + private final AtomicReference inFlight = new AtomicReference<>(); + private final int max; + private final Mutator mutator; + private final Duration timeout; + private final Supplier update; + private final AtomicBoolean finished = new AtomicBoolean(); public Transactioneer(Supplier update, Mutator mutator, Duration timeout, int max, CountDownLatch countdown) { this.update = update; diff --git 
a/sql-state/src/test/java/com/salesforce/apollo/state/UpdaterTest.java b/sql-state/src/test/java/com/salesforce/apollo/state/UpdaterTest.java index fe72409d3c..b2522a4b33 100644 --- a/sql-state/src/test/java/com/salesforce/apollo/state/UpdaterTest.java +++ b/sql-state/src/test/java/com/salesforce/apollo/state/UpdaterTest.java @@ -26,15 +26,14 @@ import org.joou.ULong; import org.junit.jupiter.api.Test; -import com.salesfoce.apollo.choam.proto.Transaction; -import com.salesfoce.apollo.state.proto.Txn; +import com.salesforce.apollo.choam.proto.Transaction; +import com.salesforce.apollo.state.proto.Txn; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.state.SqlStateMachine.Event; /** * @author hal.hildebrand - * */ public class UpdaterTest { @@ -106,15 +105,17 @@ public void smoke() throws Exception { Connection connection = updater.newConnection(); Statement statement = connection.createStatement(); - statement.execute("create table books (id int, title varchar(50), author varchar(50), price float, qty int, primary key (id))"); + statement.execute( + "create table books (id int, title varchar(50), author varchar(50), price float, qty int, primary key (id))"); Transaction.Builder builder = Transaction.newBuilder(); builder.setContent(Txn.newBuilder() - .setBatch(batch("insert into books values (1001, 'Java for dummies', 'Tan Ah Teck', 11.11, 11)", - "insert into books values (1002, 'More Java for dummies', 'Tan Ah Teck', 22.22, 22)", - "insert into books values (1003, 'More Java for more dummies', 'Mohammad Ali', 33.33, 33)", - "insert into books values (1004, 'A Cup of Java', 'Kumar', 44.44, 44)", - "insert into books values (1005, 'A Teaspoon of Java', 'Kevin Jones', 55.55, 55)")) + .setBatch( + batch("insert into books values (1001, 'Java for dummies', 'Tan Ah Teck', 11.11, 11)", + "insert into books values (1002, 'More Java for dummies', 'Tan Ah Teck', 22.22, 22)", + "insert into books values (1003, 'More Java for more dummies', 'Mohammad Ali', 33.33, 33)", + "insert into books values (1004, 'A Cup of Java', 'Kumar', 44.44, 44)", + "insert into books values (1005, 'A Teaspoon of Java', 'Kevin Jones', 55.55, 55)")) .build() .toByteString()); Transaction transaction = builder.build(); diff --git a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/binder/BinderClient.java b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/binder/BinderClient.java index d81a38571f..295d36f4b1 100644 --- a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/binder/BinderClient.java +++ b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/binder/BinderClient.java @@ -12,9 +12,9 @@ import com.codahale.metrics.Timer.Context; import com.google.common.util.concurrent.ListenableFuture; import com.google.protobuf.Empty; -import com.salesfoce.apollo.stereotomy.event.proto.Ident; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.BinderGrpc; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.BinderGrpc.BinderFutureStub; +import com.salesforce.apollo.stereotomy.event.proto.Ident; +import com.salesforce.apollo.stereotomy.services.grpc.proto.BinderGrpc; +import com.salesforce.apollo.stereotomy.services.grpc.proto.BinderGrpc.BinderFutureStub; import com.salesforce.apollo.archipelago.ManagedServerChannel; import com.salesforce.apollo.archipelago.ServerConnectionCache.CreateClientCommunications; 
import com.salesforce.apollo.membership.Member; @@ -22,29 +22,27 @@ /** * @author hal.hildebrand - * */ public class BinderClient implements BinderService { - public static CreateClientCommunications getCreate(StereotomyMetrics metrics) { - return (c) -> { - return new BinderClient(c, metrics); - }; - - } - private final ManagedServerChannel channel; private final BinderFutureStub client; private final StereotomyMetrics metrics; - public BinderClient(ManagedServerChannel channel, StereotomyMetrics metrics) { this.channel = channel; this.client = BinderGrpc.newFutureStub(channel).withCompression("gzip"); this.metrics = metrics; } + public static CreateClientCommunications getCreate(StereotomyMetrics metrics) { + return (c) -> { + return new BinderClient(c, metrics); + }; + + } + @Override - public CompletableFuture bind(com.salesfoce.apollo.stereotomy.event.proto.Binding binding) { + public CompletableFuture bind(com.salesforce.apollo.stereotomy.event.proto.Binding binding) { Context timer = metrics == null ? null : metrics.bindClient().time(); if (metrics != null) { metrics.outboundBandwidth().mark(binding.getSerializedSize()); diff --git a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/binder/BinderServer.java b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/binder/BinderServer.java index 23e9311602..8840f8f10c 100644 --- a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/binder/BinderServer.java +++ b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/binder/BinderServer.java @@ -8,9 +8,9 @@ import com.codahale.metrics.Timer.Context; import com.google.protobuf.Empty; -import com.salesfoce.apollo.stereotomy.event.proto.Binding; -import com.salesfoce.apollo.stereotomy.event.proto.Ident; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.BinderGrpc.BinderImplBase; +import com.salesforce.apollo.stereotomy.event.proto.Binding; +import com.salesforce.apollo.stereotomy.event.proto.Ident; +import com.salesforce.apollo.stereotomy.services.grpc.proto.BinderGrpc.BinderImplBase; import com.salesforce.apollo.archipelago.RoutableService; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.protocols.ClientIdentity; @@ -21,13 +21,12 @@ /** * @author hal.hildebrand - * */ public class BinderServer extends BinderImplBase { - private ClientIdentity identity; private final StereotomyMetrics metrics; private final RoutableService routing; + private ClientIdentity identity; public BinderServer(RoutableService router, ClientIdentity identity, StereotomyMetrics metrics) { this.metrics = metrics; diff --git a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/kerl/CommonKERLClient.java b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/kerl/CommonKERLClient.java index 23254b4d0a..707744855e 100644 --- a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/kerl/CommonKERLClient.java +++ b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/kerl/CommonKERLClient.java @@ -8,8 +8,8 @@ import com.codahale.metrics.Timer.Context; import com.google.protobuf.Empty; -import com.salesfoce.apollo.stereotomy.event.proto.*; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.*; +import com.salesforce.apollo.stereotomy.event.proto.*; +import com.salesforce.apollo.stereotomy.services.grpc.proto.*; import 
com.salesforce.apollo.membership.Member; import com.salesforce.apollo.stereotomy.services.grpc.StereotomyMetrics; import com.salesforce.apollo.stereotomy.services.proto.ProtoKERLService; diff --git a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/kerl/KERLAdapter.java b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/kerl/KERLAdapter.java index 36347bc6a6..91183ff757 100644 --- a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/kerl/KERLAdapter.java +++ b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/kerl/KERLAdapter.java @@ -6,7 +6,6 @@ */ package com.salesforce.apollo.stereotomy.services.grpc.kerl; -import com.salesfoce.apollo.stereotomy.event.proto.*; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.cryptography.JohnHancock; import com.salesforce.apollo.stereotomy.EventCoordinates; @@ -15,6 +14,7 @@ import com.salesforce.apollo.stereotomy.event.AttachmentEvent; import com.salesforce.apollo.stereotomy.event.AttachmentEvent.Attachment; import com.salesforce.apollo.stereotomy.event.KeyEvent; +import com.salesforce.apollo.stereotomy.event.proto.*; import com.salesforce.apollo.stereotomy.event.protobuf.KeyStateImpl; import com.salesforce.apollo.stereotomy.event.protobuf.ProtobufEventFactory; import com.salesforce.apollo.stereotomy.identifier.Identifier; @@ -29,7 +29,7 @@ /** * @author hal.hildebrand */ -public class KERLAdapter implements KERL { +public class KERLAdapter implements KERL.AppendKERL { private final DigestAlgorithm algorithm; private final ProtoKERLService kerl; @@ -81,7 +81,7 @@ public Void appendValidations(EventCoordinates coordinates, Map kerl(Identifier identifier) { .map(kwa -> ProtobufEventFactory.from(kwa)) .toList(); } - - @Override - public KeyState getKeyState(Identifier identifier, ULong sequenceNumber) { - var keyState = kerl.getKeyState(identifier.toIdent(), sequenceNumber.longValue()); - return keyState == null ? 
null : new KeyStateImpl(keyState); - } } diff --git a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/kerl/KERLClient.java b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/kerl/KERLClient.java index 21d8066d1c..71cce9265c 100644 --- a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/kerl/KERLClient.java +++ b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/kerl/KERLClient.java @@ -6,7 +6,7 @@ */ package com.salesforce.apollo.stereotomy.services.grpc.kerl; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.KERLServiceGrpc; +import com.salesforce.apollo.stereotomy.services.grpc.proto.KERLServiceGrpc; import com.salesforce.apollo.archipelago.ManagedServerChannel; import com.salesforce.apollo.archipelago.ServerConnectionCache.CreateClientCommunications; import com.salesforce.apollo.membership.Member; diff --git a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/kerl/KERLServer.java b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/kerl/KERLServer.java index 475b4835c4..4c55063dce 100644 --- a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/kerl/KERLServer.java +++ b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/kerl/KERLServer.java @@ -8,9 +8,9 @@ import com.codahale.metrics.Timer.Context; import com.google.protobuf.Empty; -import com.salesfoce.apollo.stereotomy.event.proto.*; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.*; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.KERLServiceGrpc.KERLServiceImplBase; +import com.salesforce.apollo.stereotomy.event.proto.*; +import com.salesforce.apollo.stereotomy.services.grpc.proto.*; +import com.salesforce.apollo.stereotomy.services.grpc.proto.KERLServiceGrpc.KERLServiceImplBase; import com.salesforce.apollo.archipelago.RoutableService; import com.salesforce.apollo.stereotomy.services.grpc.StereotomyMetrics; import com.salesforce.apollo.stereotomy.services.proto.ProtoKERLService; @@ -20,7 +20,7 @@ * @author hal.hildebrand */ public class KERLServer extends KERLServiceImplBase { - private final StereotomyMetrics metrics; + private final StereotomyMetrics metrics; private final RoutableService routing; public KERLServer(RoutableService router, StereotomyMetrics metrics) { @@ -45,7 +45,7 @@ public void append(KeyEventsContext request, StreamObserver responseO timer.stop(); } var states = result == null ? KeyStates.getDefaultInstance() - : KeyStates.newBuilder().addAllKeyStates(result).build(); + : KeyStates.newBuilder().addAllKeyStates(result).build(); responseObserver.onNext(states); responseObserver.onCompleted(); if (metrics != null) { @@ -86,8 +86,8 @@ public void appendKERL(KERLContext request, StreamObserver responseOb if (timer != null) { timer.stop(); } - var results = result == null ? KeyStates.getDefaultInstance() - : KeyStates.newBuilder().addAllKeyStates(result).build(); + var results = + result == null ? KeyStates.getDefaultInstance() : KeyStates.newBuilder().addAllKeyStates(result).build(); responseObserver.onNext(results); responseObserver.onCompleted(); if (metrics != null) { @@ -128,8 +128,8 @@ public void appendWithAttachments(KeyEventWithAttachmentsContext request, if (timer != null) { timer.stop(); } - var states = result == null ? 
KeyStates.getDefaultInstance() - : KeyStates.newBuilder().addAllKeyStates(result).build(); + var states = + result == null ? KeyStates.getDefaultInstance() : KeyStates.newBuilder().addAllKeyStates(result).build(); responseObserver.onNext(states); responseObserver.onCompleted(); if (metrics != null) { diff --git a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/observer/EventObserver.java b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/observer/EventObserver.java index 08cc519c83..527ea22f09 100644 --- a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/observer/EventObserver.java +++ b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/observer/EventObserver.java @@ -8,21 +8,20 @@ import java.util.List; -import com.salesfoce.apollo.stereotomy.event.proto.AttachmentEvent; -import com.salesfoce.apollo.stereotomy.event.proto.KERL_; -import com.salesfoce.apollo.stereotomy.event.proto.KeyEvent_; -import com.salesfoce.apollo.stereotomy.event.proto.Validations; +import com.salesforce.apollo.stereotomy.event.proto.AttachmentEvent; +import com.salesforce.apollo.stereotomy.event.proto.KERL_; +import com.salesforce.apollo.stereotomy.event.proto.KeyEvent_; +import com.salesforce.apollo.stereotomy.event.proto.Validations; import com.salesforce.apollo.cryptography.Digest; /** * @author hal.hildebrand - * */ public interface EventObserver { - void publish(KERL_ kerl, List validations, Digest from); + void publish(KERL_ kerl, List validations, Digest from); void publishAttachments(List attachments, Digest from); - void publishEvents(List events, List validations, Digest from); + void publishEvents(List events, List validations, Digest from); } diff --git a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/observer/EventObserverClient.java b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/observer/EventObserverClient.java index be99407de2..8d038b43ab 100644 --- a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/observer/EventObserverClient.java +++ b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/observer/EventObserverClient.java @@ -7,15 +7,15 @@ package com.salesforce.apollo.stereotomy.services.grpc.observer; import com.codahale.metrics.Timer.Context; -import com.salesfoce.apollo.stereotomy.event.proto.AttachmentEvent; -import com.salesfoce.apollo.stereotomy.event.proto.KERL_; -import com.salesfoce.apollo.stereotomy.event.proto.KeyEvent_; -import com.salesfoce.apollo.stereotomy.event.proto.Validations; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.AttachmentsContext; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.EventObserverGrpc; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.EventObserverGrpc.EventObserverFutureStub; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.KERLContext; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.KeyEventsContext; +import com.salesforce.apollo.stereotomy.event.proto.AttachmentEvent; +import com.salesforce.apollo.stereotomy.event.proto.KERL_; +import com.salesforce.apollo.stereotomy.event.proto.KeyEvent_; +import com.salesforce.apollo.stereotomy.event.proto.Validations; +import com.salesforce.apollo.stereotomy.services.grpc.proto.AttachmentsContext; +import com.salesforce.apollo.stereotomy.services.grpc.proto.EventObserverGrpc; +import 
com.salesforce.apollo.stereotomy.services.grpc.proto.EventObserverGrpc.EventObserverFutureStub; +import com.salesforce.apollo.stereotomy.services.grpc.proto.KERLContext; +import com.salesforce.apollo.stereotomy.services.grpc.proto.KeyEventsContext; import com.salesforce.apollo.archipelago.ManagedServerChannel; import com.salesforce.apollo.archipelago.ServerConnectionCache.CreateClientCommunications; import com.salesforce.apollo.membership.Member; @@ -30,9 +30,9 @@ */ public class EventObserverClient implements EventObserverService { - private final ManagedServerChannel channel; + private final ManagedServerChannel channel; private final EventObserverFutureStub client; - private final StereotomyMetrics metrics; + private final StereotomyMetrics metrics; public EventObserverClient(ManagedServerChannel channel, StereotomyMetrics metrics) { this.channel = channel; @@ -115,9 +115,9 @@ public void publishAttachments(List attachments) { public void publishEvents(List events, List validations) { Context timer = metrics == null ? null : metrics.publishEventsClient().time(); KeyEventsContext request = KeyEventsContext.newBuilder() - .addAllKeyEvent(events) - .addAllValidations(validations) - .build(); + .addAllKeyEvent(events) + .addAllValidations(validations) + .build(); if (metrics != null) { metrics.outboundBandwidth().mark(request.getSerializedSize()); metrics.outboundPublishEventsRequest().mark(request.getSerializedSize()); diff --git a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/observer/EventObserverServer.java b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/observer/EventObserverServer.java index 0e870c8391..6785477e59 100644 --- a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/observer/EventObserverServer.java +++ b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/observer/EventObserverServer.java @@ -8,10 +8,10 @@ import com.codahale.metrics.Timer.Context; import com.google.protobuf.Empty; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.AttachmentsContext; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.EventObserverGrpc.EventObserverImplBase; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.KERLContext; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.KeyEventsContext; +import com.salesforce.apollo.stereotomy.services.grpc.proto.AttachmentsContext; +import com.salesforce.apollo.stereotomy.services.grpc.proto.EventObserverGrpc.EventObserverImplBase; +import com.salesforce.apollo.stereotomy.services.grpc.proto.KERLContext; +import com.salesforce.apollo.stereotomy.services.grpc.proto.KeyEventsContext; import com.salesforce.apollo.archipelago.RoutableService; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.protocols.ClientIdentity; @@ -23,8 +23,8 @@ */ public class EventObserverServer extends EventObserverImplBase { - private final ClientIdentity identity; - private final StereotomyMetrics metrics; + private final ClientIdentity identity; + private final StereotomyMetrics metrics; private final RoutableService routing; public EventObserverServer(RoutableService router, ClientIdentity identity, diff --git a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/resolver/ResolverClient.java b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/resolver/ResolverClient.java index 6a547b85fc..18db711ece 100644 --- 
a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/resolver/ResolverClient.java +++ b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/resolver/ResolverClient.java @@ -9,10 +9,10 @@ import java.util.Optional; import com.codahale.metrics.Timer.Context; -import com.salesfoce.apollo.stereotomy.event.proto.Binding; -import com.salesfoce.apollo.stereotomy.event.proto.Ident; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.ResolverGrpc; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.ResolverGrpc.ResolverBlockingStub; +import com.salesforce.apollo.stereotomy.event.proto.Binding; +import com.salesforce.apollo.stereotomy.event.proto.Ident; +import com.salesforce.apollo.stereotomy.services.grpc.proto.ResolverGrpc; +import com.salesforce.apollo.stereotomy.services.grpc.proto.ResolverGrpc.ResolverBlockingStub; import com.salesforce.apollo.archipelago.ManagedServerChannel; import com.salesforce.apollo.archipelago.ServerConnectionCache.CreateClientCommunications; import com.salesforce.apollo.membership.Member; @@ -20,27 +20,25 @@ /** * @author hal.hildebrand - * */ public class ResolverClient implements ResolverService { - public static CreateClientCommunications getCreate(StereotomyMetrics metrics) { - return (c) -> { - return new ResolverClient(c, metrics); - }; - - } - private final ManagedServerChannel channel; private final ResolverBlockingStub client; private final StereotomyMetrics metrics; - public ResolverClient(ManagedServerChannel channel, StereotomyMetrics metrics) { this.channel = channel; this.client = ResolverGrpc.newBlockingStub(channel).withCompression("gzip"); this.metrics = metrics; } + public static CreateClientCommunications getCreate(StereotomyMetrics metrics) { + return (c) -> { + return new ResolverClient(c, metrics); + }; + + } + @Override public void close() { channel.release(); diff --git a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/resolver/ResolverServer.java b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/resolver/ResolverServer.java index db21ad0a7a..cac9410156 100644 --- a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/resolver/ResolverServer.java +++ b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/resolver/ResolverServer.java @@ -9,9 +9,9 @@ import java.util.Optional; import com.codahale.metrics.Timer.Context; -import com.salesfoce.apollo.stereotomy.event.proto.Binding; -import com.salesfoce.apollo.stereotomy.event.proto.Ident; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.ResolverGrpc.ResolverImplBase; +import com.salesforce.apollo.stereotomy.event.proto.Binding; +import com.salesforce.apollo.stereotomy.event.proto.Ident; +import com.salesforce.apollo.stereotomy.services.grpc.proto.ResolverGrpc.ResolverImplBase; import com.salesforce.apollo.archipelago.RoutableService; import com.salesforce.apollo.stereotomy.services.grpc.StereotomyMetrics; import com.salesforce.apollo.stereotomy.services.proto.ProtoResolver; @@ -20,7 +20,6 @@ /** * @author hal.hildebrand - * */ public class ResolverServer extends ResolverImplBase { diff --git a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/validation/EventValidationClient.java b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/validation/EventValidationClient.java index 1b7719173d..4251a69104 100644 --- 
a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/validation/EventValidationClient.java +++ b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/validation/EventValidationClient.java @@ -13,10 +13,10 @@ import com.codahale.metrics.Timer.Context; import com.google.common.util.concurrent.ListenableFuture; import com.google.protobuf.BoolValue; -import com.salesfoce.apollo.stereotomy.event.proto.KeyEvent_; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.KeyEventContext; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.ValidatorGrpc; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.ValidatorGrpc.ValidatorFutureStub; +import com.salesforce.apollo.stereotomy.event.proto.KeyEvent_; +import com.salesforce.apollo.stereotomy.services.grpc.proto.KeyEventContext; +import com.salesforce.apollo.stereotomy.services.grpc.proto.ValidatorGrpc; +import com.salesforce.apollo.stereotomy.services.grpc.proto.ValidatorGrpc.ValidatorFutureStub; import com.salesforce.apollo.archipelago.ManagedServerChannel; import com.salesforce.apollo.archipelago.ServerConnectionCache.CreateClientCommunications; import com.salesforce.apollo.membership.Member; @@ -25,10 +25,18 @@ /** * @author hal.hildebrand - * */ public class EventValidationClient implements EventValidationService { + private final ManagedServerChannel channel; + private final ValidatorFutureStub client; + private final StereotomyMetrics metrics; + public EventValidationClient(ManagedServerChannel channel, StereotomyMetrics metrics) { + this.channel = channel; + this.client = ValidatorGrpc.newFutureStub(channel).withCompression("gzip"); + this.metrics = metrics; + } + public static CreateClientCommunications getCreate(StereotomyMetrics metrics) { return (c) -> { return new EventValidationClient(c, metrics); @@ -55,16 +63,6 @@ public CompletableFuture validate(KeyEvent_ event) { }; } - private final ManagedServerChannel channel; - private final ValidatorFutureStub client; - private final StereotomyMetrics metrics; - - public EventValidationClient(ManagedServerChannel channel, StereotomyMetrics metrics) { - this.channel = channel; - this.client = ValidatorGrpc.newFutureStub(channel).withCompression("gzip"); - this.metrics = metrics; - } - @Override public void close() { channel.release(); diff --git a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/validation/EventValidationServer.java b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/validation/EventValidationServer.java index 3313944253..373f90a01e 100644 --- a/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/validation/EventValidationServer.java +++ b/stereotomy-services/src/main/java/com/salesforce/apollo/stereotomy/services/grpc/validation/EventValidationServer.java @@ -10,8 +10,8 @@ import com.codahale.metrics.Timer.Context; import com.google.protobuf.BoolValue; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.KeyEventContext; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.ValidatorGrpc.ValidatorImplBase; +import com.salesforce.apollo.stereotomy.services.grpc.proto.KeyEventContext; +import com.salesforce.apollo.stereotomy.services.grpc.proto.ValidatorGrpc.ValidatorImplBase; import com.salesforce.apollo.archipelago.RoutableService; import com.salesforce.apollo.stereotomy.services.grpc.StereotomyMetrics; import com.salesforce.apollo.stereotomy.services.proto.ProtoEventValidation; @@ -20,7 +20,6 @@ 
/** * @author hal.hildebrand - * */ public class EventValidationServer extends ValidatorImplBase { diff --git a/stereotomy-services/src/test/java/com/salesforce/apollo/stereotomy/services/grpc/TestBinder.java b/stereotomy-services/src/test/java/com/salesforce/apollo/stereotomy/services/grpc/TestBinder.java index a3d228e172..3a0926d205 100644 --- a/stereotomy-services/src/test/java/com/salesforce/apollo/stereotomy/services/grpc/TestBinder.java +++ b/stereotomy-services/src/test/java/com/salesforce/apollo/stereotomy/services/grpc/TestBinder.java @@ -6,8 +6,6 @@ */ package com.salesforce.apollo.stereotomy.services.grpc; -import com.salesfoce.apollo.stereotomy.event.proto.Binding; -import com.salesfoce.apollo.stereotomy.event.proto.Ident; import com.salesforce.apollo.archipelago.LocalServer; import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.archipelago.ServerConnectionCache; @@ -16,6 +14,8 @@ import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; import com.salesforce.apollo.protocols.ClientIdentity; import com.salesforce.apollo.stereotomy.StereotomyImpl; +import com.salesforce.apollo.stereotomy.event.proto.Binding; +import com.salesforce.apollo.stereotomy.event.proto.Ident; import com.salesforce.apollo.stereotomy.mem.MemKERL; import com.salesforce.apollo.stereotomy.mem.MemKeyStore; import com.salesforce.apollo.stereotomy.services.grpc.binder.BinderClient; @@ -28,7 +28,6 @@ import java.time.Duration; import java.util.UUID; import java.util.concurrent.CompletableFuture; -import java.util.concurrent.Executors; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -57,14 +56,13 @@ public void bind() throws Exception { var context = DigestAlgorithm.DEFAULT.getOrigin(); var prefix = UUID.randomUUID().toString(); var entropy = SecureRandom.getInstance("SHA1PRNG"); - entropy.setSeed(new byte[]{6, 6, 6}); + entropy.setSeed(new byte[] { 6, 6, 6 }); var stereotomy = new StereotomyImpl(new MemKeyStore(), new MemKERL(DigestAlgorithm.DEFAULT), entropy); var serverMember = new ControlledIdentifierMember(stereotomy.newIdentifier()); var clientMember = new ControlledIdentifierMember(stereotomy.newIdentifier()); var builder = ServerConnectionCache.newBuilder(); - final var exec = Executors.newVirtualThreadPerTaskExecutor(); serverRouter = new LocalServer(prefix, serverMember).router(builder); clientRouter = new LocalServer(prefix, clientMember).router(builder); @@ -95,10 +93,10 @@ public Digest getFrom() { } }; serverRouter.create(serverMember, context, protoService, protoService.getClass().toString(), - r -> new BinderServer(r, ci, null), null, null); + r -> new BinderServer(r, ci, null), null, null); var clientComms = clientRouter.create(clientMember, context, protoService, protoService.getClass().toString(), - r -> new BinderServer(r, ci, null), BinderClient.getCreate(null), null); + r -> new BinderServer(r, ci, null), BinderClient.getCreate(null), null); var client = clientComms.connect(serverMember); diff --git a/stereotomy-services/src/test/java/com/salesforce/apollo/stereotomy/services/grpc/TestEventObserver.java b/stereotomy-services/src/test/java/com/salesforce/apollo/stereotomy/services/grpc/TestEventObserver.java index e4b6612916..9ba52e7b91 100644 --- a/stereotomy-services/src/test/java/com/salesforce/apollo/stereotomy/services/grpc/TestEventObserver.java +++ b/stereotomy-services/src/test/java/com/salesforce/apollo/stereotomy/services/grpc/TestEventObserver.java @@ -6,10 +6,10 @@ */ package com.salesforce.apollo.stereotomy.services.grpc; 
-import com.salesfoce.apollo.stereotomy.event.proto.AttachmentEvent; -import com.salesfoce.apollo.stereotomy.event.proto.KERL_; -import com.salesfoce.apollo.stereotomy.event.proto.KeyEvent_; -import com.salesfoce.apollo.stereotomy.event.proto.Validations; +import com.salesforce.apollo.stereotomy.event.proto.AttachmentEvent; +import com.salesforce.apollo.stereotomy.event.proto.KERL_; +import com.salesforce.apollo.stereotomy.event.proto.KeyEvent_; +import com.salesforce.apollo.stereotomy.event.proto.Validations; import com.salesforce.apollo.archipelago.LocalServer; import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.archipelago.ServerConnectionCache; @@ -65,7 +65,6 @@ public void observer() throws Exception { var clientMember = new ControlledIdentifierMember(stereotomy.newIdentifier()); var builder = ServerConnectionCache.newBuilder(); - final var exec = Executors.newVirtualThreadPerTaskExecutor(); serverRouter = new LocalServer(prefix, serverMember).router(builder); clientRouter = new LocalServer(prefix, clientMember).router(builder); diff --git a/stereotomy-services/src/test/java/com/salesforce/apollo/stereotomy/services/grpc/TestEventValidation.java b/stereotomy-services/src/test/java/com/salesforce/apollo/stereotomy/services/grpc/TestEventValidation.java index f657d23977..0ac4b5ac17 100644 --- a/stereotomy-services/src/test/java/com/salesforce/apollo/stereotomy/services/grpc/TestEventValidation.java +++ b/stereotomy-services/src/test/java/com/salesforce/apollo/stereotomy/services/grpc/TestEventValidation.java @@ -6,13 +6,13 @@ */ package com.salesforce.apollo.stereotomy.services.grpc; -import com.salesfoce.apollo.stereotomy.event.proto.KeyEvent_; import com.salesforce.apollo.archipelago.LocalServer; import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.archipelago.ServerConnectionCache; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; import com.salesforce.apollo.stereotomy.StereotomyImpl; +import com.salesforce.apollo.stereotomy.event.proto.KeyEvent_; import com.salesforce.apollo.stereotomy.mem.MemKERL; import com.salesforce.apollo.stereotomy.mem.MemKeyStore; import com.salesforce.apollo.stereotomy.services.grpc.validation.EventValidationClient; @@ -25,7 +25,6 @@ import java.time.Duration; import java.util.UUID; import java.util.concurrent.CompletableFuture; -import java.util.concurrent.Executors; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -54,14 +53,13 @@ public void validation() throws Exception { var context = DigestAlgorithm.DEFAULT.getOrigin(); var prefix = UUID.randomUUID().toString(); var entropy = SecureRandom.getInstance("SHA1PRNG"); - entropy.setSeed(new byte[]{6, 6, 6}); + entropy.setSeed(new byte[] { 6, 6, 6 }); var stereotomy = new StereotomyImpl(new MemKeyStore(), new MemKERL(DigestAlgorithm.DEFAULT), entropy); var serverMember = new ControlledIdentifierMember(stereotomy.newIdentifier()); var clientMember = new ControlledIdentifierMember(stereotomy.newIdentifier()); var builder = ServerConnectionCache.newBuilder(); - final var exec = Executors.newVirtualThreadPerTaskExecutor(); serverRouter = new LocalServer(prefix, serverMember).router(builder); clientRouter = new LocalServer(prefix, clientMember).router(builder); @@ -79,11 +77,11 @@ public CompletableFuture validate(KeyEvent_ event) { }; serverRouter.create(serverMember, context, protoService, protoService.getClass().toString(), - r -> new EventValidationServer(r, 
null), null, null); + r -> new EventValidationServer(r, null), null, null); var clientComms = clientRouter.create(clientMember, context, protoService, protoService.getClass().toString(), - r -> new EventValidationServer(r, null), - EventValidationClient.getCreate(null), null); + r -> new EventValidationServer(r, null), + EventValidationClient.getCreate(null), null); var client = clientComms.connect(serverMember); diff --git a/stereotomy-services/src/test/java/com/salesforce/apollo/stereotomy/services/grpc/TestKerlService.java b/stereotomy-services/src/test/java/com/salesforce/apollo/stereotomy/services/grpc/TestKerlService.java index f5dda7e51c..1c4721d54e 100644 --- a/stereotomy-services/src/test/java/com/salesforce/apollo/stereotomy/services/grpc/TestKerlService.java +++ b/stereotomy-services/src/test/java/com/salesforce/apollo/stereotomy/services/grpc/TestKerlService.java @@ -14,7 +14,7 @@ import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; import com.salesforce.apollo.stereotomy.*; import com.salesforce.apollo.stereotomy.event.KeyEvent; -import com.salesforce.apollo.stereotomy.event.Seal.CoordinatesSeal; +import com.salesforce.apollo.stereotomy.event.Seal; import com.salesforce.apollo.stereotomy.event.Seal.DigestSeal; import com.salesforce.apollo.stereotomy.identifier.spec.InteractionSpecification; import com.salesforce.apollo.stereotomy.identifier.spec.RotationSpecification; @@ -43,8 +43,8 @@ */ public class TestKerlService { final StereotomyKeyStore ks = new MemKeyStore(); - KERL kel; - SecureRandom secureRandom; + KERL.AppendKERL kel; + SecureRandom secureRandom; private Router clientRouter; private Router serverRouter; @@ -79,8 +79,7 @@ public void kerl() throws Exception { var digest = DigestAlgorithm.BLAKE3_256.digest("digest seal".getBytes()); var event = EventCoordinates.of(kel.getKeyEvent(i.getLastEstablishmentEvent())); - var seals = List.of(DigestSeal.construct(digest), DigestSeal.construct(digest), - CoordinatesSeal.construct(event)); + var seals = List.of(DigestSeal.construct(digest), DigestSeal.construct(digest), Seal.construct(event)); i.rotate(); i.seal(InteractionSpecification.newBuilder()); diff --git a/stereotomy-services/src/test/java/com/salesforce/apollo/stereotomy/services/grpc/TestResolver.java b/stereotomy-services/src/test/java/com/salesforce/apollo/stereotomy/services/grpc/TestResolver.java index 208ba6507a..19da109324 100644 --- a/stereotomy-services/src/test/java/com/salesforce/apollo/stereotomy/services/grpc/TestResolver.java +++ b/stereotomy-services/src/test/java/com/salesforce/apollo/stereotomy/services/grpc/TestResolver.java @@ -6,8 +6,8 @@ */ package com.salesforce.apollo.stereotomy.services.grpc; -import com.salesfoce.apollo.stereotomy.event.proto.Binding; -import com.salesfoce.apollo.stereotomy.event.proto.Ident; +import com.salesforce.apollo.stereotomy.event.proto.Binding; +import com.salesforce.apollo.stereotomy.event.proto.Ident; import com.salesforce.apollo.archipelago.LocalServer; import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.archipelago.ServerConnectionCache; diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/DelegatedKERL.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/DelegatedKERL.java index d87980e969..182b1db35e 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/DelegatedKERL.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/DelegatedKERL.java @@ -22,10 +22,10 @@ /** * @author hal.hildebrand */ -public 
class DelegatedKERL implements KERL { - protected final KERL delegate; +public class DelegatedKERL implements KERL.AppendKERL { + protected final AppendKERL delegate; - public DelegatedKERL(KERL delegate) { + public DelegatedKERL(AppendKERL delegate) { this.delegate = delegate; } @@ -79,6 +79,11 @@ public KeyState getKeyState(Identifier identifier) { return delegate.getKeyState(identifier); } + @Override + public KeyState getKeyState(Identifier identifier, ULong sequenceNumber) { + return delegate.getKeyState(identifier, sequenceNumber); + } + @Override public KeyStateWithAttachments getKeyStateWithAttachments(EventCoordinates coordinates) { return delegate.getKeyStateWithAttachments(coordinates); @@ -104,9 +109,4 @@ public Verifier.DefaultVerifier getVerifier(KeyCoordinates coordinates) { public List kerl(Identifier identifier) { return delegate.kerl(identifier); } - - @Override - public KeyState getKeyState(Identifier identifier, ULong sequenceNumber) { - return delegate.getKeyState(identifier, sequenceNumber); - } } diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/DigestKERL.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/DigestKERL.java index 06dd1c5a05..67cf693c0c 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/DigestKERL.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/DigestKERL.java @@ -12,7 +12,7 @@ /** * @author hal.hildebrand */ -public interface DigestKERL extends KERL { +public interface DigestKERL extends KERL.AppendKERL { KeyEvent getKeyEvent(Digest digest); diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/EventCoordinates.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/EventCoordinates.java index 104e5ef457..d87b109aae 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/EventCoordinates.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/EventCoordinates.java @@ -12,7 +12,7 @@ import org.joou.ULong; -import com.salesfoce.apollo.stereotomy.event.proto.EventCoords; +import com.salesforce.apollo.stereotomy.event.proto.EventCoords; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.stereotomy.event.KeyEvent; @@ -20,13 +20,36 @@ /** * Coordinates that precisely locate a unique event in the KEL - * - * @author hal.hildebrand * + * @author hal.hildebrand */ public class EventCoordinates { public static EventCoordinates NONE = new EventCoordinates(); + private final Digest digest; + private final Identifier identifier; + private final String ilk; + private final ULong sequenceNumber; + + public EventCoordinates(EventCoords coordinates) { + digest = Digest.from(coordinates.getDigest()); + ilk = coordinates.getIlk(); + identifier = Identifier.from(coordinates.getIdentifier()); + sequenceNumber = ULong.valueOf(coordinates.getSequenceNumber()); + } + + public EventCoordinates(Identifier identifier, ULong sequenceNumber, Digest digest, String ilk) { + this.identifier = requireNonNull(identifier, "identifier"); + this.sequenceNumber = sequenceNumber; + this.digest = requireNonNull(digest, "digest"); + this.ilk = ilk; + } + private EventCoordinates() { + identifier = Identifier.NONE; + digest = Digest.NONE; + sequenceNumber = ULong.valueOf(-1); + ilk = KeyEvent.NONE; + } public static EventCoordinates from(EventCoords coordinates) { if (EventCoords.getDefaultInstance().equals(coordinates)) { @@ -62,40 +85,14 @@ public static EventCoordinates 
of(KeyEvent event, DigestAlgorithm algorithm) { return of(event, digest); } - private final Digest digest; - private final Identifier identifier; - private final String ilk; - private final ULong sequenceNumber; - - public EventCoordinates(EventCoords coordinates) { - digest = Digest.from(coordinates.getDigest()); - ilk = coordinates.getIlk(); - identifier = Identifier.from(coordinates.getIdentifier()); - sequenceNumber = ULong.valueOf(coordinates.getSequenceNumber()); - } - - public EventCoordinates(Identifier identifier, ULong sequenceNumber, Digest digest, String ilk) { - this.identifier = requireNonNull(identifier, "identifier"); - this.sequenceNumber = sequenceNumber; - this.digest = requireNonNull(digest, "digest"); - this.ilk = ilk; - } - - private EventCoordinates() { - identifier = Identifier.NONE; - digest = Digest.NONE; - sequenceNumber = ULong.valueOf(-1); - ilk = KeyEvent.NONE; - } - @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj instanceof EventCoordinates other) { - return Objects.equals(sequenceNumber, other.sequenceNumber) && Objects.equals(ilk, other.ilk) && - Objects.equals(identifier, other.identifier) && Objects.equals(digest, other.digest); + return Objects.equals(sequenceNumber, other.sequenceNumber) && Objects.equals(ilk, other.ilk) + && Objects.equals(identifier, other.identifier) && Objects.equals(digest, other.digest); } return false; } diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/EventValidation.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/EventValidation.java index d97d495c40..d40187f771 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/EventValidation.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/EventValidation.java @@ -6,34 +6,22 @@ */ package com.salesforce.apollo.stereotomy; -import java.io.InputStream; -import java.util.Optional; - -import com.google.protobuf.ByteString; -import com.salesforce.apollo.cryptography.JohnHancock; -import com.salesforce.apollo.cryptography.SigningThreshold; -import com.salesforce.apollo.cryptography.Verifier.Filtered; import com.salesforce.apollo.stereotomy.event.EstablishmentEvent; -import com.salesforce.apollo.utils.BbBackedInputStream; +import com.salesforce.apollo.stereotomy.identifier.Identifier; +import org.joou.ULong; /** * The EventValidation provides validation predicates for EstablishmentEvents - * - * @author hal.hildebrand * + * @author hal.hildebrand */ public interface EventValidation { - EventValidation NONE = new EventValidation() { - @Override - public Filtered filtered(EventCoordinates coordinates, SigningThreshold threshold, JohnHancock signature, - InputStream message) { - return null; - } + EventValidation NONE = new EventValidation() { @Override - public Optional getKeyState(EventCoordinates coordinates) { - return Optional.empty(); + public KeyState keyState(Identifier id, ULong sequenceNumber) { + return null; } @Override @@ -45,43 +33,68 @@ public boolean validate(EstablishmentEvent event) { public boolean validate(EventCoordinates coordinates) { return true; } + }; + EventValidation NO_VALIDATION = new EventValidation() { @Override - public boolean verify(EventCoordinates coordinates, JohnHancock signature, InputStream message) { - return true; + public KeyState keyState(Identifier id, ULong sequenceNumber) { + return null; } @Override - public boolean verify(EventCoordinates coordinates, SigningThreshold threshold, JohnHancock signature, - InputStream message) { - return 
true; + public boolean validate(EstablishmentEvent event) { + return false; } - }; - Filtered filtered(EventCoordinates coordinates, SigningThreshold threshold, JohnHancock signature, - InputStream message); + @Override + public boolean validate(EventCoordinates coordinates) { + return false; + } + }; - Optional getKeyState(EventCoordinates coordinates); + KeyState keyState(Identifier id, ULong sequenceNumber); /** - * Answer true if the event is validated. This means that thresholds have been - * met from indicated witnesses and trusted validators. + * Answer true if the event indicated by the coordinates is validated. This means that thresholds have been met from + * indicated witnesses and trusted validators. */ - boolean validate(EstablishmentEvent event); + boolean validate(EventCoordinates coordinates); /** - * Answer true if the event indicated by the coordinates is validated. This - * means that thresholds have been met from indicated witnesses and trusted - * validators. + * Answer true if the event is validated. This means that thresholds have been met from indicated witnesses and + * trusted validators. */ - boolean validate(EventCoordinates coordinates); + boolean validate(EstablishmentEvent event); - default boolean verify(EventCoordinates coordinates, JohnHancock signature, ByteString byteString) { - return verify(coordinates, signature, BbBackedInputStream.aggregate(byteString)); - } + class DelegatedEventValidation implements EventValidation { + private volatile EventValidation delegate; + + public DelegatedEventValidation(EventValidation delegate) { + this.delegate = delegate; + } - boolean verify(EventCoordinates coordinates, JohnHancock signature, InputStream message); + @Override + public KeyState keyState(Identifier id, ULong sequenceNumber) { + return delegate().keyState(id, sequenceNumber); + } - boolean verify(EventCoordinates coordinates, SigningThreshold threshold, JohnHancock signature, - InputStream message); + public void setDelegate(EventValidation delegate) { + this.delegate = delegate; + } + + @Override + public boolean validate(EventCoordinates coordinates) { + return delegate().validate(coordinates); + } + + @Override + public boolean validate(EstablishmentEvent event) { + return delegate().validate(event); + } + + private EventValidation delegate() { + final var current = delegate; + return current; + } + } } diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/KEL.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/KEL.java index 4575b9c24b..7c2faa4eec 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/KEL.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/KEL.java @@ -6,12 +6,12 @@ */ package com.salesforce.apollo.stereotomy; -import com.salesfoce.apollo.stereotomy.event.proto.KeyStateWithAttachments_; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.cryptography.Verifier; import com.salesforce.apollo.stereotomy.event.AttachmentEvent; import com.salesforce.apollo.stereotomy.event.AttachmentEvent.Attachment; import com.salesforce.apollo.stereotomy.event.KeyEvent; +import com.salesforce.apollo.stereotomy.event.proto.KeyStateWithAttachments_; import com.salesforce.apollo.stereotomy.event.protobuf.KeyStateImpl; import com.salesforce.apollo.stereotomy.identifier.Identifier; import org.joou.ULong; @@ -27,33 +27,6 @@ */ public interface KEL { - /** - * Answer the Verifier using key state at the supplied key coordinates - * - * @return - */ - default 
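A minimal sketch of how the new DelegatedEventValidation could be used: start with a no-op delegate while the member bootstraps, then re-point it once real key state resolution is available. The bootstrapped instance and the coordinates variable are assumptions, not part of this patch:

    // Permissive validation during bootstrap, swapped for the real validator afterwards.
    var validation = new EventValidation.DelegatedEventValidation(EventValidation.NONE);
    // ... once the view has been joined and key states can be resolved:
    validation.setDelegate(bootstrapped);                  // assumed EventValidation produced elsewhere
    boolean admitted = validation.validate(coordinates);   // now routed to the live delegate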
Verifier.DefaultVerifier getVerifier(KeyCoordinates coordinates) { - return new Verifier.DefaultVerifier( - getKeyState(coordinates.getEstablishmentEvent()).getKeys().get(coordinates.getKeyIndex())); - } - - /** - * Append the event. The event will be validated before inserted. - */ - KeyState append(KeyEvent event); - - /** - * Append the list of events. The events will be validated before inserted. - */ - default List append(KeyEvent... event) { - return append(Arrays.asList(event), Collections.emptyList()); - } - - /** - * Append the list of events and attachments. The events will be validated before inserted. - */ - List append(List events, List attachments); - /** * Answer the Attachment for the coordinates */ @@ -79,6 +52,8 @@ default List append(KeyEvent... event) { */ KeyState getKeyState(Identifier identifier); + KeyState getKeyState(Identifier identifier, ULong sequenceNumber); + /** * Answer the combined KeyState and Attachment for this state * @@ -89,7 +64,34 @@ default KeyStateWithAttachments getKeyStateWithAttachments(EventCoordinates coor return new KeyStateWithAttachments(getKeyState(coordinates), getAttachment(coordinates)); } - KeyState getKeyState(Identifier identifier, ULong sequenceNumber); + /** + * Answer the Verifier using key state at the supplied key coordinates + * + * @return + */ + default Verifier.DefaultVerifier getVerifier(KeyCoordinates coordinates) { + return new Verifier.DefaultVerifier( + getKeyState(coordinates.getEstablishmentEvent()).getKeys().get(coordinates.getKeyIndex())); + } + + interface AppendKEL extends KEL { + /** + * Append the event. The event will be validated before inserted. + */ + KeyState append(KeyEvent event); + + /** + * Append the list of events. The events will be validated before inserted. + */ + default List append(KeyEvent... event) { + return append(Arrays.asList(event), Collections.emptyList()); + } + + /** + * Append the list of events and attachments. The events will be validated before inserted. 
+ */ + List append(List events, List attachments); + } record KeyStateWithAttachments(KeyState state, Attachment attachments) { public static KeyStateWithAttachments from(KeyStateWithAttachments_ ksa) { diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/KERL.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/KERL.java index 36dcdff710..d54224e291 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/KERL.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/KERL.java @@ -7,12 +7,12 @@ package com.salesforce.apollo.stereotomy; import com.google.protobuf.InvalidProtocolBufferException; -import com.salesfoce.apollo.stereotomy.event.proto.KeyEventWithAttachments; import com.salesforce.apollo.cryptography.JohnHancock; import com.salesforce.apollo.stereotomy.event.AttachmentEvent; import com.salesforce.apollo.stereotomy.event.AttachmentEvent.Attachment; import com.salesforce.apollo.stereotomy.event.KeyEvent; import com.salesforce.apollo.stereotomy.event.KeyStateWithEndorsementsAndValidations; +import com.salesforce.apollo.stereotomy.event.proto.KeyEventWithAttachments; import com.salesforce.apollo.stereotomy.event.protobuf.ProtobufEventFactory; import com.salesforce.apollo.stereotomy.identifier.Identifier; @@ -26,10 +26,6 @@ */ public interface KERL extends KEL { - Void append(List events); - - Void appendValidations(EventCoordinates coordinates, Map validations); - default KeyStateWithEndorsementsAndValidations getKeyStateWithEndorsementsAndValidations( EventCoordinates coordinates) { var ks = getKeyStateWithAttachments(coordinates); @@ -77,6 +73,13 @@ private List kerl(KeyEvent event) { return result; } + interface AppendKERL extends KERL, AppendKEL { + Void append(List events); + + Void appendValidations(EventCoordinates coordinates, Map validations); + + } + record EventWithAttachments(KeyEvent event, Attachment attachments) { static EventWithAttachments fromBase64(String encoded) { @@ -86,7 +89,7 @@ static EventWithAttachments fromBase64(String encoded) { if (split.length == 3) { try { attachment = Attachment.of( - com.salesfoce.apollo.stereotomy.event.proto.Attachment.parseFrom(decoder.decode(split[2]))); + com.salesforce.apollo.stereotomy.event.proto.Attachment.parseFrom(decoder.decode(split[2]))); } catch (InvalidProtocolBufferException e) { throw new IllegalArgumentException("Invalid attachment: " + encoded); } @@ -97,24 +100,24 @@ static EventWithAttachments fromBase64(String encoded) { attachment); } - public KeyEventWithAttachments toKeyEvente() { - var builder = KeyEventWithAttachments.newBuilder(); - event.setEventOf(builder); - if (attachments != null) { - builder.setAttachment(attachments.toAttachemente()); - } - return builder.build(); - } - public String toBase64() { var encoder = Base64.getUrlEncoder().withoutPadding(); var attachBytes = - attachments == null ? com.salesfoce.apollo.stereotomy.event.proto.Attachment.getDefaultInstance() - .toByteArray() + attachments == null ? 
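An illustrative sketch of the read/append split introduced here: query-only callers can depend on plain KEL/KERL, while only writers need the Append sub-interfaces. The enclosing class and parameter names are assumed:

    // Read side: only the query API is required.
    static KeyState observe(KERL readOnly, Identifier id) {
        return readOnly.getKeyState(id);
    }

    // Write side: appending events requires the KERL.AppendKERL sub-interface,
    // which validates events before insertion.
    static KeyState record(KERL.AppendKERL appendable, KeyEvent event) {
        return appendable.append(event);
    }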
com.salesforce.apollo.stereotomy.event.proto.Attachment.getDefaultInstance() + .toByteArray() : attachments.toAttachemente().toByteArray(); var encoded = event.getIlk() + "|" + encoder.encodeToString(event.getBytes()) + "|" + encoder.encodeToString(attachBytes); return encoded; } + + public KeyEventWithAttachments toKeyEvente() { + var builder = KeyEventWithAttachments.newBuilder(); + event.setEventOf(builder); + if (attachments != null) { + builder.setAttachment(attachments.toAttachemente()); + } + return builder.build(); + } } } diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/KerlVerifier.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/KerlVerifier.java index 5de68d9685..104fb5b58e 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/KerlVerifier.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/KerlVerifier.java @@ -1,66 +1,24 @@ package com.salesforce.apollo.stereotomy; -import com.salesforce.apollo.cryptography.JohnHancock; -import com.salesforce.apollo.cryptography.SigningThreshold; -import com.salesforce.apollo.cryptography.Verifier; import com.salesforce.apollo.stereotomy.identifier.Identifier; import org.joou.ULong; -import java.io.InputStream; -import java.util.Optional; - /** * A Verifier that uses the backing Stereotomy for keys used for Signature verification for the Identifier * * @author hal.hildebrand **/ -public class KerlVerifier implements Verifier { +public class KerlVerifier extends KeyStateVerifier { - private final D identifier; private final KERL kerl; public KerlVerifier(D identifier, KERL kerl) { - this.identifier = identifier; + super(identifier); this.kerl = kerl; } - public D identifier() { - return identifier; - } - @Override - public Filtered filtered(SigningThreshold threshold, JohnHancock signature, InputStream message) { - var verifier = verifierFor(signature.getSequenceNumber()); - return verifier.isEmpty() ? new Filtered(false, 0, - new JohnHancock(signature.getAlgorithm(), new byte[] {}, ULong.MIN)) - : verifier.get().filtered(threshold, signature, message); - } - - @Override - public boolean verify(JohnHancock signature, InputStream message) { - var verifier = verifierFor(signature.getSequenceNumber()); - return verifier.isEmpty() ? false : verifier.get().verify(signature, message); - } - - @Override - public boolean verify(SigningThreshold threshold, JohnHancock signature, InputStream message) { - var verifier = verifierFor(signature.getSequenceNumber()); - return verifier.isEmpty() ? 
false : verifier.get().verify(threshold, signature, message); - } - - private Optional verifierFor(ULong sequenceNumber) { - KeyState keyState = kerl.getKeyState(identifier, sequenceNumber); - if (keyState == null) { - return Optional.empty(); - } - return Optional.of(new DefaultVerifier(keyState.getKeys())); - } - - public Optional verifierFor(EventCoordinates coordinates) { - KeyState keyState = kerl.getKeyState(coordinates); - if (keyState == null) { - return Optional.empty(); - } - return Optional.of(new DefaultVerifier(keyState.getKeys())); + protected KeyState getKeyState(ULong sequenceNumber) { + return kerl.getKeyState(identifier, sequenceNumber); } } diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/KeyCoordinates.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/KeyCoordinates.java index 4ad16d4afd..b2e2a46077 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/KeyCoordinates.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/KeyCoordinates.java @@ -10,32 +10,18 @@ import java.util.Objects; -import com.salesfoce.apollo.stereotomy.event.proto.KeyCoords; +import com.salesforce.apollo.stereotomy.event.proto.KeyCoords; import com.salesforce.apollo.stereotomy.event.EstablishmentEvent; /** * The coordinates of a key in the KEL - * - * @author hal.hildebrand * + * @author hal.hildebrand */ public class KeyCoordinates { - public static KeyCoordinates of(EstablishmentEvent establishmentEvent, int keyIndex) { - EventCoordinates coordinates = EventCoordinates.of(establishmentEvent); - return new KeyCoordinates(coordinates, keyIndex); - } - private final EventCoordinates establishmentEvent; private final int keyIndex; - - public KeyCoords toKeyCoords() { - return KeyCoords.newBuilder() - .setEstablishment(establishmentEvent.toEventCoords()) - .setKeyIndex(keyIndex) - .build(); - } - public KeyCoordinates(EventCoordinates establishmentEvent, int keyIndex) { if (keyIndex < 0) { throw new IllegalArgumentException("keyIndex must be >= 0"); @@ -50,6 +36,11 @@ public KeyCoordinates(KeyCoords coordinates) { keyIndex = coordinates.getKeyIndex(); } + public static KeyCoordinates of(EstablishmentEvent establishmentEvent, int keyIndex) { + EventCoordinates coordinates = EventCoordinates.of(establishmentEvent); + return new KeyCoordinates(coordinates, keyIndex); + } + @Override public boolean equals(Object obj) { if (this == obj) { @@ -75,6 +66,13 @@ public int hashCode() { return Objects.hash(establishmentEvent, keyIndex); } + public KeyCoords toKeyCoords() { + return KeyCoords.newBuilder() + .setEstablishment(establishmentEvent.toEventCoords()) + .setKeyIndex(keyIndex) + .build(); + } + @Override public String toString() { return this.establishmentEvent + ":" + this.keyIndex; diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/KeyState.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/KeyState.java index 31430c117d..06a194c83f 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/KeyState.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/KeyState.java @@ -13,7 +13,7 @@ import org.joou.ULong; -import com.salesfoce.apollo.stereotomy.event.proto.KeyState_; +import com.salesforce.apollo.stereotomy.event.proto.KeyState_; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.SigningThreshold; import com.salesforce.apollo.stereotomy.event.InceptionEvent.ConfigurationTrait; @@ -22,9 +22,8 @@ /** * The state of a key in the KEL - * 
- * @author hal.hildebrand * + * @author hal.hildebrand */ public interface KeyState { @@ -57,18 +56,19 @@ default ULong getSequenceNumber() { SigningThreshold getSigningThreshold(); - List getWitnesses(); - int getWitnessThreshold(); + List getWitnesses(); + default boolean isDelegated() { - return this.getDelegatingIdentifier().isPresent() && - !this.getDelegatingIdentifier().get().equals(Identifier.NONE); + return this.getDelegatingIdentifier().isPresent() && !this.getDelegatingIdentifier() + .get() + .equals(Identifier.NONE); } default boolean isTransferable() { - return this.getCoordinates().getIdentifier().isTransferable() && - this.getNextKeyConfigurationDigest().isPresent(); + return this.getCoordinates().getIdentifier().isTransferable() && this.getNextKeyConfigurationDigest() + .isPresent(); } KeyState_ toKeyState_(); diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/KeyStateVerifier.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/KeyStateVerifier.java new file mode 100644 index 0000000000..db7082e783 --- /dev/null +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/KeyStateVerifier.java @@ -0,0 +1,57 @@ +package com.salesforce.apollo.stereotomy; + +import com.salesforce.apollo.cryptography.JohnHancock; +import com.salesforce.apollo.cryptography.SigningThreshold; +import com.salesforce.apollo.cryptography.Verifier; +import com.salesforce.apollo.stereotomy.identifier.Identifier; +import org.joou.ULong; + +import java.io.InputStream; +import java.util.Optional; + +/** + * An abstract verifier that uses KeyState + * + * @author hal.hildebrand + **/ +public abstract class KeyStateVerifier implements Verifier { + protected final D identifier; + + public KeyStateVerifier(D identifier) { + this.identifier = identifier; + } + + @Override + public Filtered filtered(SigningThreshold threshold, JohnHancock signature, InputStream message) { + var verifier = verifierFor(signature.getSequenceNumber()); + return verifier.isEmpty() ? new Filtered(false, 0, + new JohnHancock(signature.getAlgorithm(), new byte[] {}, ULong.MIN)) + : verifier.get().filtered(threshold, signature, message); + } + + public D identifier() { + return identifier; + } + + @Override + public boolean verify(SigningThreshold threshold, JohnHancock signature, InputStream message) { + var verifier = verifierFor(signature.getSequenceNumber()); + return verifier.isEmpty() ? false : verifier.get().verify(threshold, signature, message); + } + + @Override + public boolean verify(JohnHancock signature, InputStream message) { + var verifier = verifierFor(signature.getSequenceNumber()); + return verifier.isEmpty() ? false : verifier.get().verify(signature, message); + } + + protected abstract KeyState getKeyState(ULong sequenceNumber); + + protected Optional verifierFor(ULong sequenceNumber) { + KeyState keyState = getKeyState(sequenceNumber); + if (keyState == null) { + return Optional.empty(); + } + return Optional.of(new DefaultVerifier(keyState.getKeys())); + } +} diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/ReadOnlyKERL.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/ReadOnlyKERL.java deleted file mode 100644 index 1c4dc0d69d..0000000000 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/ReadOnlyKERL.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright (c) 2022, salesforce.com, inc. - * All rights reserved. 
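A small sketch of the refactored verifier hierarchy: KerlVerifier now only supplies the key state lookup, while KeyStateVerifier carries the shared signature logic. kerl, identifier, signature and message are assumed to be in scope:

    // Keys are resolved from the KERL at the signature's sequence number.
    Verifier verifier = new KerlVerifier<>(identifier, kerl);
    boolean valid = verifier.verify(signature, message);
    // Other key state sources only need to subclass KeyStateVerifier and
    // implement getKeyState(ULong sequenceNumber).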
- * SPDX-License-Identifier: BSD-3-Clause - * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause - */ -package com.salesforce.apollo.stereotomy; - -import com.salesforce.apollo.cryptography.JohnHancock; -import com.salesforce.apollo.stereotomy.event.AttachmentEvent; -import com.salesforce.apollo.stereotomy.event.KeyEvent; - -import java.util.Collections; -import java.util.List; -import java.util.Map; - -/** - * @author hal.hildebrand - */ -public class ReadOnlyKERL extends DelegatedKERL { - - public ReadOnlyKERL(KERL delegate) { - super(delegate); - } - - private static T complete() { - return null; - } - - private static List completeList() { - return Collections.emptyList(); - } - - @Override - public KeyState append(KeyEvent event) { - return complete(); - } - - @Override - public List append(KeyEvent... events) { - return completeList(); - } - - @Override - public Void append(List events) { - return complete(); - } - - @Override - public List append(List events, List attachments) { - return completeList(); - } - - @Override - public Void appendValidations(EventCoordinates coordinates, - Map validations) { - return complete(); - } -} diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/Stereotomy.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/Stereotomy.java index d7bb301eb6..16e96ac8c8 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/Stereotomy.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/Stereotomy.java @@ -7,8 +7,8 @@ package com.salesforce.apollo.stereotomy; import com.google.protobuf.InvalidProtocolBufferException; -import com.salesfoce.apollo.cryptography.proto.Sig; -import com.salesfoce.apollo.stereotomy.event.proto.EventCoords; +import com.salesforce.apollo.cryptography.proto.Sig; +import com.salesforce.apollo.stereotomy.event.proto.EventCoords; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.cryptography.JohnHancock; import com.salesforce.apollo.cryptography.Verifier; diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/StereotomyImpl.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/StereotomyImpl.java index 96b8cb5e56..470b4a5fe5 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/StereotomyImpl.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/StereotomyImpl.java @@ -6,7 +6,6 @@ */ package com.salesforce.apollo.stereotomy; -import com.salesfoce.apollo.stereotomy.event.proto.KeyState_; import com.salesforce.apollo.cryptography.*; import com.salesforce.apollo.cryptography.Signer.SignerImpl; import com.salesforce.apollo.cryptography.cert.BcX500NameDnImpl; @@ -18,6 +17,7 @@ import com.salesforce.apollo.stereotomy.event.AttachmentEvent.AttachmentImpl; import com.salesforce.apollo.stereotomy.event.InceptionEvent.ConfigurationTrait; import com.salesforce.apollo.stereotomy.event.Seal.EventSeal; +import com.salesforce.apollo.stereotomy.event.proto.KeyState_; import com.salesforce.apollo.stereotomy.event.protobuf.ProtobufEventFactory; import com.salesforce.apollo.stereotomy.identifier.BasicIdentifier; import com.salesforce.apollo.stereotomy.identifier.Identifier; @@ -50,15 +50,16 @@ public class StereotomyImpl implements Stereotomy { private static final Logger log = LoggerFactory.getLogger(StereotomyImpl.class); final EventFactory eventFactory; - final KERL kerl; + final KERL.AppendKERL kerl; private final SecureRandom entropy; private 
final StereotomyKeyStore keyStore; - public StereotomyImpl(StereotomyKeyStore keyStore, KERL kerl, SecureRandom entropy) { + public StereotomyImpl(StereotomyKeyStore keyStore, KERL.AppendKERL kerl, SecureRandom entropy) { this(keyStore, kerl, entropy, new ProtobufEventFactory()); } - public StereotomyImpl(StereotomyKeyStore keyStore, KERL kerl, SecureRandom entropy, EventFactory eventFactory) { + public StereotomyImpl(StereotomyKeyStore keyStore, KERL.AppendKERL kerl, SecureRandom entropy, + EventFactory eventFactory) { this.keyStore = keyStore; this.entropy = entropy; this.eventFactory = eventFactory; @@ -99,7 +100,7 @@ public KeyState getKeyState(EventCoordinates eventCoordinates) { @Override public Verifier getVerifier(KeyCoordinates coordinates) { KeyState state = getKeyState(coordinates); - return new Verifier.DefaultVerifier(state.getKeys().get(coordinates.getKeyIndex())); + return new Verifier.DefaultVerifier(state.getKeys()); } @Override @@ -389,13 +390,13 @@ public SigningThreshold getSigningThreshold() { } @Override - public List getWitnesses() { - return getState().getWitnesses(); + public int getWitnessThreshold() { + return getState().getWitnessThreshold(); } @Override - public int getWitnessThreshold() { - return getState().getWitnessThreshold(); + public List getWitnesses() { + return getState().getWitnesses(); } @Override diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/Verifiers.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/Verifiers.java index 5ec9d31fe6..370302e3e2 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/Verifiers.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/Verifiers.java @@ -6,28 +6,130 @@ */ package com.salesforce.apollo.stereotomy; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; - -import com.salesfoce.apollo.stereotomy.event.proto.KeyState_; +import com.salesforce.apollo.cryptography.JohnHancock; +import com.salesforce.apollo.cryptography.SigningThreshold; import com.salesforce.apollo.cryptography.Verifier; import com.salesforce.apollo.cryptography.Verifier.DefaultVerifier; import com.salesforce.apollo.stereotomy.event.InceptionEvent; +import com.salesforce.apollo.stereotomy.event.proto.KeyState_; import com.salesforce.apollo.stereotomy.event.protobuf.KeyStateImpl; import com.salesforce.apollo.stereotomy.event.protobuf.ProtobufEventFactory; import com.salesforce.apollo.stereotomy.identifier.Identifier; +import java.io.InputStream; +import java.util.*; + /** * @author hal.hildebrand - * */ public interface Verifiers { + Verifiers NONE = new Verifiers() { + @Override + public Optional verifierFor(EventCoordinates coordinates) { + return Optional.of(v()); + } + + @Override + public Optional verifierFor(Identifier identifier) { + return Optional.of(v()); + } + + Verifier v() { + return new Verifier() { + @Override + public Filtered filtered(SigningThreshold threshold, JohnHancock signature, InputStream message) { + return new Filtered(false, 0, null); + } + + @Override + public boolean verify(JohnHancock signature, InputStream message) { + return true; + } + + @Override + public boolean verify(SigningThreshold threshold, JohnHancock signature, InputStream message) { + return true; + } + }; + } + }; + + static Verifiers from(KERL kerl) { + return new Verifiers() { + @Override + public Optional verifierFor(EventCoordinates coordinates) { + return verifierFor(coordinates.getIdentifier()); + } + + 
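Constructing a Stereotomy against the narrowed constructor is sketched below; kerl is an assumed KERL.AppendKERL implementation and the key store mirrors the one used in the tests above:

    // StereotomyImpl must be able to append inception/rotation events, hence AppendKERL.
    var stereotomy = new StereotomyImpl(new MemKeyStore(), kerl, new SecureRandom());
    // Identifiers are then created and rotated through the normal Stereotomy API.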
@Override + public Optional verifierFor(Identifier identifier) { + return Optional.of(new KerlVerifier<>(identifier, kerl)); + } + }; + } + + static Verifiers fromEvents(List states) { + return new FixedVerifiers(FixedVerifiers.fromEvents(states)); + } + + static Verifiers fromEventState(List states) { + return new FixedVerifiers(FixedVerifiers.fromEventState(states)); + } + + static Verifiers fromKeyState(List states) { + return new FixedVerifiers(FixedVerifiers.fromKeyState(states)); + } + + static Verifiers fromKeyState_(List states) { + return new FixedVerifiers(FixedVerifiers.fromKeyState_(states)); + } + + Optional verifierFor(EventCoordinates coordinates); + + Optional verifierFor(Identifier identifier); + + class DelegatedVerifiers implements Verifiers { + private volatile Verifiers delegate; + + public DelegatedVerifiers(Verifiers delegate) { + this.delegate = delegate; + } + + public void setDelegate(Verifiers delegate) { + this.delegate = delegate; + } + + @Override + public Optional verifierFor(Identifier identifier) { + return delegate().verifierFor(identifier); + } + + @Override + public Optional verifierFor(EventCoordinates coordinates) { + return delegate().verifierFor(coordinates); + } + + private Verifiers delegate() { + final var current = delegate; + return current; + } + } + class FixedVerifiers implements Verifiers { - record Pair(Map coords, Map ids) {} + private final Map verifiersByCoordinates; + private final Map verifiersByIdentifer; + + public FixedVerifiers(Map verifiersByCoordinates, + Map verifiersByIdentifer) { + this.verifiersByCoordinates = verifiersByCoordinates; + this.verifiersByIdentifer = verifiersByIdentifer; + } + + private FixedVerifiers(Pair verifiers) { + verifiersByCoordinates = verifiers.coords; + verifiersByIdentifer = verifiers.ids; + } private static Pair fromEvents(Collection states) { Map coords = new HashMap<>(); @@ -41,7 +143,8 @@ private static Pair fromEvents(Collection states) { return new Pair(coords, ids); } - private static Pair fromEventState(Collection states) { + private static Pair fromEventState( + Collection states) { return fromEvents(states.stream().map(ks -> ProtobufEventFactory.toKeyEvent(ks)).toList()); } @@ -61,20 +164,6 @@ private static Pair fromKeyState_(Collection states) { return fromKeyState(states.stream().map(ks -> new KeyStateImpl(ks)).map(ks -> (KeyState) ks).toList()); } - private final Map verifiersByCoordinates; - private final Map verifiersByIdentifer; - - public FixedVerifiers(Map verifiersByCoordinates, - Map verifiersByIdentifer) { - this.verifiersByCoordinates = verifiersByCoordinates; - this.verifiersByIdentifer = verifiersByIdentifer; - } - - private FixedVerifiers(Pair verifiers) { - verifiersByCoordinates = verifiers.coords; - verifiersByIdentifer = verifiers.ids; - } - @Override public Optional verifierFor(EventCoordinates coordinates) { return Optional.ofNullable(verifiersByCoordinates.get(coordinates)); @@ -84,26 +173,9 @@ public Optional verifierFor(EventCoordinates coordinates) { public Optional verifierFor(Identifier identifier) { return Optional.ofNullable(verifiersByIdentifer.get(identifier)); } - } - - static Verifiers fromEvents(List states) { - return new FixedVerifiers(FixedVerifiers.fromEvents(states)); - } - - static Verifiers fromEventState(List states) { - return new FixedVerifiers(FixedVerifiers.fromEventState(states)); - } - static Verifiers fromKeyState(List states) { - return new FixedVerifiers(FixedVerifiers.fromKeyState(states)); - } - - static Verifiers fromKeyState_(List 
states) { - return new FixedVerifiers(FixedVerifiers.fromKeyState_(states)); + record Pair(Map coords, Map ids) { + } } - Optional verifierFor(EventCoordinates coordinates); - - Optional verifierFor(Identifier identifier); - } diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/caching/CachingKEL.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/caching/CachingKEL.java index 9e31bca719..6323cb982d 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/caching/CachingKEL.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/caching/CachingKEL.java @@ -41,7 +41,7 @@ * * @author hal.hildebrand */ -public class CachingKEL implements KEL { +public class CachingKEL implements KEL.AppendKEL { private static final Logger log = LoggerFactory.getLogger(CachingKEL.class); private final Function, ?> kelSupplier; private final LoadingCache keyCoords; @@ -130,6 +130,11 @@ public List append(List events, List attach } } + public void clear() { + keyCoords.invalidateAll(); + ksCoords.invalidateAll(); + } + @Override public Attachment getAttachment(EventCoordinates coordinates) { try { @@ -171,9 +176,9 @@ public KeyState getKeyState(Identifier identifier) { } @Override - public KeyStateWithAttachments getKeyStateWithAttachments(EventCoordinates coordinates) { + public KeyState getKeyState(Identifier identifier, ULong sequenceNumber) { try { - return complete(kel -> kel.getKeyStateWithAttachments(coordinates)); + return complete(kel -> kel.getKeyState(identifier, sequenceNumber)); } catch (Throwable e) { log.error("Cannot complete append", e); return null; @@ -181,9 +186,9 @@ public KeyStateWithAttachments getKeyStateWithAttachments(EventCoordinates coord } @Override - public KeyState getKeyState(Identifier identifier, ULong sequenceNumber) { + public KeyStateWithAttachments getKeyStateWithAttachments(EventCoordinates coordinates) { try { - return complete(kel -> kel.getKeyState(identifier, sequenceNumber)); + return complete(kel -> kel.getKeyStateWithAttachments(coordinates)); } catch (Throwable e) { log.error("Cannot complete append", e); return null; @@ -210,9 +215,4 @@ protected T complete(Function func) { return null; } } - - public void clear() { - keyCoords.invalidateAll(); - ksCoords.invalidateAll(); - } } diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/caching/CachingKERL.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/caching/CachingKERL.java index ab0e9bf1d5..abf0c66e4a 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/caching/CachingKERL.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/caching/CachingKERL.java @@ -26,14 +26,14 @@ /** * @author hal.hildebrand */ -public class CachingKERL extends CachingKEL implements KERL { +public class CachingKERL extends CachingKEL implements KERL.AppendKERL { private static final Logger log = LoggerFactory.getLogger(CachingKERL.class); - public CachingKERL(Function, ?> kelSupplier) { + public CachingKERL(Function, ?> kelSupplier) { super(kelSupplier); } - public CachingKERL(Function, ?> kelSupplier, Caffeine builder, + public CachingKERL(Function, ?> kelSupplier, Caffeine builder, Caffeine eventBuilder) { super(kelSupplier, builder, eventBuilder); } @@ -59,6 +59,11 @@ public Void appendValidations(EventCoordinates coordinates, Map kerl.getKeyState(identifier, sequenceNumber)); + } + @Override public Map getValidations(EventCoordinates coordinates) { try { @@ -78,9 +83,4 @@ public List kerl(Identifier identifier) { 
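An illustrative sketch of the new Verifiers entry points, assuming kerl and identifier are in scope; DelegatedVerifiers lets the resolution source be re-pointed after the member joins:

    // Begin with the permissive NONE verifiers, then delegate to KERL-backed resolution.
    var verifiers = new Verifiers.DelegatedVerifiers(Verifiers.NONE);
    verifiers.setDelegate(Verifiers.from(kerl));
    Optional<Verifier> v = verifiers.verifierFor(identifier);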
return null; } } - - @Override - public KeyState getKeyState(Identifier identifier, ULong sequenceNumber) { - return complete(kerl -> kerl.getKeyState(identifier, sequenceNumber)); - } } diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/db/UniKERL.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/db/UniKERL.java index 33b04e08dc..30a89e7422 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/db/UniKERL.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/db/UniKERL.java @@ -7,9 +7,9 @@ package com.salesforce.apollo.stereotomy.db; import com.google.protobuf.InvalidProtocolBufferException; -import com.salesfoce.apollo.cryptography.proto.Sig; -import com.salesfoce.apollo.stereotomy.event.proto.EventCoords; -import com.salesfoce.apollo.stereotomy.event.proto.Sealed; +import com.salesforce.apollo.cryptography.proto.Sig; +import com.salesforce.apollo.stereotomy.event.proto.EventCoords; +import com.salesforce.apollo.stereotomy.event.proto.Sealed; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.cryptography.JohnHancock; @@ -237,7 +237,7 @@ public static void appendAttachments(Connection connection, List attachm AttachmentEvent event; try { event = new AttachmentEventImpl( - com.salesfoce.apollo.stereotomy.event.proto.AttachmentEvent.parseFrom(bytes)); + com.salesforce.apollo.stereotomy.event.proto.AttachmentEvent.parseFrom(bytes)); } catch (InvalidProtocolBufferException e) { log.error("Error deserializing attachment event", e); return; diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/AttachmentEvent.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/AttachmentEvent.java index b6522abb5b..3f6c1f7cbe 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/AttachmentEvent.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/AttachmentEvent.java @@ -18,9 +18,22 @@ /** * @author hal.hildebrand - * */ public interface AttachmentEvent { + static Attachment of(Seal... 
seals) { + return new AttachmentImpl(Arrays.asList(seals)); + } + + Attachment attachments(); + + EventCoordinates coordinates(); + + byte[] getBytes(); + + com.salesforce.apollo.stereotomy.event.proto.AttachmentEvent toEvent_(); + + Version version(); + interface Attachment { Attachment EMPTY = new Attachment() { @@ -36,26 +49,26 @@ public List seals() { } }; - static Attachment of(com.salesfoce.apollo.stereotomy.event.proto.Attachment attachment) { + static Attachment of(com.salesforce.apollo.stereotomy.event.proto.Attachment attachment) { return new AttachmentImpl(attachment.getSealsList().stream().map(s -> Seal.from(s)).toList(), attachment.getEndorsementsMap() .entrySet() .stream() - .collect(Collectors.toMap(e -> e.getKey(), - e -> JohnHancock.of(e.getValue())))); + .collect( + Collectors.toMap(e -> e.getKey(), e -> JohnHancock.of(e.getValue())))); } Map endorsements(); List seals(); - default com.salesfoce.apollo.stereotomy.event.proto.Attachment toAttachemente() { - var builder = com.salesfoce.apollo.stereotomy.event.proto.Attachment.newBuilder(); + default com.salesforce.apollo.stereotomy.event.proto.Attachment toAttachemente() { + var builder = com.salesforce.apollo.stereotomy.event.proto.Attachment.newBuilder(); builder.addAllSeals(seals().stream().map(s -> s.toSealed()).toList()) .putAllEndorsements(endorsements().entrySet() .stream() - .collect(Collectors.toMap(e -> e.getKey(), - e -> e.getValue().toSig()))); + .collect( + Collectors.toMap(e -> e.getKey(), e -> e.getValue().toSig()))); return builder.build(); } } @@ -95,18 +108,4 @@ public List seals() { } } - - static Attachment of(Seal... seals) { - return new AttachmentImpl(Arrays.asList(seals)); - } - - Attachment attachments(); - - EventCoordinates coordinates(); - - byte[] getBytes(); - - com.salesfoce.apollo.stereotomy.event.proto.AttachmentEvent toEvent_(); - - Version version(); } diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/KeyEvent.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/KeyEvent.java index 5589eba813..d5cf47bd17 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/KeyEvent.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/KeyEvent.java @@ -6,30 +6,26 @@ */ package com.salesforce.apollo.stereotomy.event; -import org.joou.ULong; - -import com.salesfoce.apollo.stereotomy.event.proto.KeyEventWithAttachments.Builder; -import com.salesfoce.apollo.stereotomy.event.proto.KeyEvent_; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.cryptography.JohnHancock; import com.salesforce.apollo.stereotomy.EventCoordinates; +import com.salesforce.apollo.stereotomy.event.proto.KeyEventWithAttachments.Builder; +import com.salesforce.apollo.stereotomy.event.proto.KeyEvent_; import com.salesforce.apollo.stereotomy.identifier.Identifier; +import org.joou.ULong; /** * @author hal.hildebrand - * */ public interface KeyEvent { - public static final String DELEGATED_INCEPTION_TYPE = "dip"; - public static final String DELEGATED_ROTATION_TYPE = "drt"; - public static final String INCEPTION_TYPE = "icp"; - public static final String INTERACTION_TYPE = "ixn"; - public static final String NONE = "nan"; - public static final String RECEIPT_FROM_BASIC_TYPE = "rct"; - public static final String RECEIPT_FROM_TRANSFERABLE_TYPE = "vrc"; - public static final String ROTATION_TYPE = "rot"; + String DELEGATED_INCEPTION_TYPE = "dip"; + String 
DELEGATED_ROTATION_TYPE = "drt"; + String INCEPTION_TYPE = "icp"; + String INTERACTION_TYPE = "ixn"; + String NONE = "nan"; + String ROTATION_TYPE = "rot"; JohnHancock getAuthentication(); diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/KeyStateWithEndorsementsAndValidations.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/KeyStateWithEndorsementsAndValidations.java index a8dd8ef358..029f75e560 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/KeyStateWithEndorsementsAndValidations.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/KeyStateWithEndorsementsAndValidations.java @@ -10,8 +10,8 @@ import java.util.TreeMap; import java.util.stream.Collectors; -import com.salesfoce.apollo.stereotomy.event.proto.KeyStateWithEndorsementsAndValidations_; -import com.salesfoce.apollo.stereotomy.event.proto.Validation_; +import com.salesforce.apollo.stereotomy.event.proto.KeyStateWithEndorsementsAndValidations_; +import com.salesforce.apollo.stereotomy.event.proto.Validation_; import com.salesforce.apollo.cryptography.JohnHancock; import com.salesforce.apollo.stereotomy.EventCoordinates; import com.salesforce.apollo.stereotomy.KeyState; @@ -19,7 +19,6 @@ /** * @author hal.hildebrand - * */ public record KeyStateWithEndorsementsAndValidations(KeyState state, TreeMap endorsements, Map validations) { @@ -31,16 +30,18 @@ public static KeyStateWithEndorsementsAndValidations create(KeyState state, Map< } public static KeyStateWithEndorsementsAndValidations from(KeyStateWithEndorsementsAndValidations_ ks) { - return new KeyStateWithEndorsementsAndValidations(new KeyStateImpl(ks.getState()), - new TreeMap<>(ks.getEndorsementsMap() - .entrySet() - .stream() - .collect(Collectors.toMap(e -> e.getKey(), - e -> JohnHancock.from(e.getValue())))), - ks.getValidationsList() - .stream() - .collect(Collectors.toMap(e -> EventCoordinates.from(e.getValidator()), - e -> JohnHancock.from(e.getSignature())))); + return new KeyStateWithEndorsementsAndValidations(new KeyStateImpl(ks.getState()), new TreeMap<>( + ks.getEndorsementsMap() + .entrySet() + .stream() + .collect(Collectors.toMap(e -> e.getKey(), e -> JohnHancock.from(e.getValue())))), ks.getValidationsList() + .stream() + .collect( + Collectors.toMap( + e -> EventCoordinates.from( + e.getValidator()), + e -> JohnHancock.from( + e.getSignature())))); } public KeyStateWithEndorsementsAndValidations_ toKS() { @@ -50,17 +51,20 @@ public KeyStateWithEndorsementsAndValidations_ toKS() { .addAllValidations(validations.entrySet() .stream() .map(e -> Validation_.newBuilder() - .setValidator(e.getKey() - .toEventCoords()) - .setSignature(e.getValue() - .toSig()) + .setValidator( + e.getKey() + .toEventCoords()) + .setSignature( + e.getValue() + .toSig()) .build()) .toList()) .putAllEndorsements(endorsements.entrySet() .stream() - .collect(Collectors.toMap(e -> e.getKey(), - e -> e.getValue() - .toSig()))) + .collect( + Collectors.toMap(e -> e.getKey(), + e -> e.getValue() + .toSig()))) .build(); } diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/Seal.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/Seal.java index 15df4ba2f0..372af44cf2 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/Seal.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/Seal.java @@ -6,33 +6,42 @@ */ package com.salesforce.apollo.stereotomy.event; -import 
com.salesfoce.apollo.stereotomy.event.proto.EventLoc; -import com.salesfoce.apollo.stereotomy.event.proto.Sealed; +import com.google.protobuf.Any; +import com.salesforce.apollo.stereotomy.event.proto.EventLoc; +import com.salesforce.apollo.stereotomy.event.proto.Sealed; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.stereotomy.EventCoordinates; import com.salesforce.apollo.stereotomy.identifier.Identifier; /** * @author hal.hildebrand - * */ public interface Seal { - interface CoordinatesSeal extends Seal { - static CoordinatesSeal construct(EventCoordinates coordinates) { - return new CoordinatesSeal() { + static Seal from(Sealed s) { + if (s.hasEventCoordinates()) { + return construct(new EventCoordinates(s.getEventCoordinates())); + } + if (s.hasDigest()) { + return DigestSeal.construct(Digest.from(s.getDigest())); + } + if (s.hasEvent()) { + EventLoc event = s.getEvent(); + return EventSeal.construct(Identifier.from(event.getIdentifier()), new Digest(event.getDigest()), + event.getSequenceNumber()); + } + return ProtoSeal.construct(s.getValue()); + } + + static CoordinatesSeal construct(EventCoordinates coordinates) { + return new CoordinatesSealImpl(coordinates); + } - @Override - public EventCoordinates getEvent() { - return coordinates; - } + Sealed toSealed(); - @Override - public Sealed toSealed() { - return Sealed.newBuilder().setEventCoordinates(coordinates.toEventCoords()).build(); - } - }; - } + Any value(); + + interface CoordinatesSeal extends Seal { EventCoordinates getEvent(); } @@ -40,53 +49,40 @@ public Sealed toSealed() { interface DigestSeal extends Seal { static DigestSeal construct(Digest digest) { - return new DigestSeal() { - - @Override - public Digest getDigest() { - return digest; - } - - @Override - public Sealed toSealed() { - return Sealed.newBuilder().setDigest(digest.toDigeste()).build(); - } - }; + return new DigestSealImpl(digest); } Digest getDigest(); + + class DigestSealImpl extends ProtoSeal implements DigestSeal { + + private final Digest digest; + + public DigestSealImpl(Digest digest) { + this(digest, null); + } + + public DigestSealImpl(Digest digest, Any value) { + super(value); + this.digest = digest; + } + + @Override + public Digest getDigest() { + return digest; + } + + @Override + public Sealed toSealed() { + return Sealed.newBuilder().setDigest(digest.toDigeste()).setValue(value()).build(); + } + } } interface EventSeal extends Seal { static EventSeal construct(Identifier prefix, Digest digest, long sequenceNumber) { - return new EventSeal() { - - @Override - public Digest getDigest() { - return digest; - } - - @Override - public Identifier getPrefix() { - return prefix; - } - - @Override - public long getSequenceNumber() { - return sequenceNumber; - } - - @Override - public Sealed toSealed() { - return Sealed.newBuilder() - .setEvent(EventLoc.newBuilder() - .setIdentifier(prefix.toIdent()) - .setSequenceNumber(sequenceNumber)) - .setDigest(digest.toDigeste()) - .build(); - } - }; + return new EventSealImpl(prefix, digest, sequenceNumber); } Digest getDigest(); @@ -94,22 +90,93 @@ public Sealed toSealed() { Identifier getPrefix(); long getSequenceNumber(); + + class EventSealImpl extends ProtoSeal implements EventSeal { + private final Identifier prefix; + private final Digest digest; + private final long sequenceNumber; + + public EventSealImpl(Identifier prefix, Digest digest, long sequenceNumber) { + this(prefix, digest, sequenceNumber, null); + } + + public EventSealImpl(Identifier prefix, Digest digest, 
long sequenceNumber, Any value) { + super(value); + this.prefix = prefix; + this.digest = digest; + this.sequenceNumber = sequenceNumber; + } + + @Override + public Digest getDigest() { + return digest; + } + + @Override + public Identifier getPrefix() { + return prefix; + } + + @Override + public long getSequenceNumber() { + return sequenceNumber; + } + + @Override + public Sealed toSealed() { + return Sealed.newBuilder() + .setEvent(EventLoc.newBuilder() + .setDigest(digest.toDigeste()) + .setIdentifier(prefix.toIdent()) + .setSequenceNumber(sequenceNumber)) + .setValue(value()) + .build(); + } + } } - static Seal from(Sealed s) { - if (s.hasEventCoordinates()) { - return CoordinatesSeal.construct(new EventCoordinates(s.getEventCoordinates())); + class ProtoSeal implements Seal { + private final Any value; + + public ProtoSeal(Any value) { + this.value = value == null ? Any.getDefaultInstance() : value; } - if (s.hasDigest()) { - return DigestSeal.construct(Digest.from(s.getDigest())); + + static ProtoSeal construct(Any value) { + return new Seal.ProtoSeal(value); } - if (s.hasEvent()) { - EventLoc event = s.getEvent(); - return EventSeal.construct(Identifier.from(event.getIdentifier()), new Digest(event.getDigest()), - event.getSequenceNumber()); + + @Override + public Sealed toSealed() { + return Sealed.newBuilder().setValue(value).build(); + } + + @Override + public Any value() { + return value; } - throw new IllegalArgumentException("Unknown seal type"); } - Sealed toSealed(); + class CoordinatesSealImpl extends ProtoSeal implements CoordinatesSeal { + private final EventCoordinates coordinates; + + public CoordinatesSealImpl(EventCoordinates coordinates) { + this(coordinates, null); + } + + public CoordinatesSealImpl(EventCoordinates coordinates, Any value) { + super(value); + this.coordinates = coordinates; + } + + @Override + public EventCoordinates getEvent() { + return coordinates; + } + + @Override + public Sealed toSealed() { + return Sealed.newBuilder().setEventCoordinates(coordinates.toEventCoords()).build(); + } + } } diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/AttachmentEventImpl.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/AttachmentEventImpl.java index 4fbefd356e..d081c42545 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/AttachmentEventImpl.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/AttachmentEventImpl.java @@ -20,13 +20,12 @@ /** * @author hal.hildebrand - * */ public class AttachmentEventImpl implements AttachmentEvent { - private final com.salesfoce.apollo.stereotomy.event.proto.AttachmentEvent event; + private final com.salesforce.apollo.stereotomy.event.proto.AttachmentEvent event; - public AttachmentEventImpl(com.salesfoce.apollo.stereotomy.event.proto.AttachmentEvent event) { + public AttachmentEventImpl(com.salesforce.apollo.stereotomy.event.proto.AttachmentEvent event) { this.event = event; } @@ -78,7 +77,7 @@ public int hashCode() { } @Override - public com.salesfoce.apollo.stereotomy.event.proto.AttachmentEvent toEvent_() { + public com.salesforce.apollo.stereotomy.event.proto.AttachmentEvent toEvent_() { return event; } diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/DelegatedInceptionEventImpl.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/DelegatedInceptionEventImpl.java index 0f58539c9b..005ffb5966 100644 --- 
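A short sketch of the reworked Seal API, which now round-trips through Sealed and can carry an arbitrary Any payload; digest and coordinates are assumed to be in scope:

    // Construct seals, serialize them, and dispatch back to the correct variant.
    Seal digestSeal = Seal.DigestSeal.construct(digest);
    Seal coordSeal  = Seal.construct(coordinates);   // CoordinatesSeal
    Sealed wire     = coordSeal.toSealed();
    Seal decoded    = Seal.from(wire);               // variant chosen from the populated field
    Any payload     = decoded.value();               // Any.getDefaultInstance() when unset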
a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/DelegatedInceptionEventImpl.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/DelegatedInceptionEventImpl.java @@ -11,11 +11,10 @@ /** * @author hal.hildebrand - * */ public class DelegatedInceptionEventImpl extends InceptionEventImpl implements DelegatedInceptionEvent { - public DelegatedInceptionEventImpl(com.salesfoce.apollo.stereotomy.event.proto.InceptionEvent inceptionEvent) { + public DelegatedInceptionEventImpl(com.salesforce.apollo.stereotomy.event.proto.InceptionEvent inceptionEvent) { super(inceptionEvent); } diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/DelegatedRotationEventImpl.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/DelegatedRotationEventImpl.java index 323fcf465c..819140fb4f 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/DelegatedRotationEventImpl.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/DelegatedRotationEventImpl.java @@ -6,12 +6,11 @@ */ package com.salesforce.apollo.stereotomy.event.protobuf; -import com.salesfoce.apollo.stereotomy.event.proto.RotationEvent; +import com.salesforce.apollo.stereotomy.event.proto.RotationEvent; import com.salesforce.apollo.stereotomy.event.DelegatedRotationEvent; /** * @author hal.hildebrand - * */ public class DelegatedRotationEventImpl extends RotationEventImpl implements DelegatedRotationEvent { diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/EstablishmentEventImpl.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/EstablishmentEventImpl.java index 6aa3aecabc..5bc36f3a04 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/EstablishmentEventImpl.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/EstablishmentEventImpl.java @@ -14,16 +14,15 @@ import java.util.Optional; import java.util.stream.Collectors; -import com.salesfoce.apollo.stereotomy.event.proto.Establishment; -import com.salesfoce.apollo.stereotomy.event.proto.EventCommon; -import com.salesfoce.apollo.stereotomy.event.proto.Header; +import com.salesforce.apollo.stereotomy.event.proto.Establishment; +import com.salesforce.apollo.stereotomy.event.proto.EventCommon; +import com.salesforce.apollo.stereotomy.event.proto.Header; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.SigningThreshold; import com.salesforce.apollo.stereotomy.event.EstablishmentEvent; /** * @author hal.hildebrand - * */ abstract public class EstablishmentEventImpl extends KeyEventImpl implements EstablishmentEvent { diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/InceptionEventImpl.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/InceptionEventImpl.java index 8816f41fa2..f9a347a561 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/InceptionEventImpl.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/InceptionEventImpl.java @@ -12,22 +12,21 @@ import java.util.stream.Collectors; import com.google.protobuf.ByteString; -import com.salesfoce.apollo.stereotomy.event.proto.KeyEventWithAttachments.Builder; -import com.salesfoce.apollo.stereotomy.event.proto.KeyEvent_; -import com.salesfoce.apollo.cryptography.proto.PubKey; +import 
com.salesforce.apollo.stereotomy.event.proto.KeyEventWithAttachments.Builder; +import com.salesforce.apollo.stereotomy.event.proto.KeyEvent_; +import com.salesforce.apollo.cryptography.proto.PubKey; import com.salesforce.apollo.stereotomy.event.InceptionEvent; import com.salesforce.apollo.stereotomy.identifier.BasicIdentifier; import com.salesforce.apollo.stereotomy.identifier.Identifier; /** * @author hal.hildebrand - * */ public class InceptionEventImpl extends EstablishmentEventImpl implements InceptionEvent { - final com.salesfoce.apollo.stereotomy.event.proto.InceptionEvent event; + final com.salesforce.apollo.stereotomy.event.proto.InceptionEvent event; - public InceptionEventImpl(com.salesfoce.apollo.stereotomy.event.proto.InceptionEvent inceptionEvent) { + public InceptionEventImpl(com.salesforce.apollo.stereotomy.event.proto.InceptionEvent inceptionEvent) { super(inceptionEvent.getSpecification().getHeader(), inceptionEvent.getCommon(), inceptionEvent.getSpecification().getEstablishment()); event = inceptionEvent; @@ -80,7 +79,7 @@ public void setEventOf(Builder builder) { builder.setInception(event); } - public com.salesfoce.apollo.stereotomy.event.proto.InceptionEvent toInceptionEvent_() { + public com.salesforce.apollo.stereotomy.event.proto.InceptionEvent toInceptionEvent_() { return event; } diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/InteractionEventImpl.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/InteractionEventImpl.java index 6e5fe2c306..59fa3c7ffa 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/InteractionEventImpl.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/InteractionEventImpl.java @@ -11,20 +11,19 @@ import java.util.stream.Collectors; import com.google.protobuf.ByteString; -import com.salesfoce.apollo.stereotomy.event.proto.KeyEventWithAttachments.Builder; -import com.salesfoce.apollo.stereotomy.event.proto.KeyEvent_; +import com.salesforce.apollo.stereotomy.event.proto.KeyEventWithAttachments.Builder; +import com.salesforce.apollo.stereotomy.event.proto.KeyEvent_; import com.salesforce.apollo.stereotomy.event.InteractionEvent; import com.salesforce.apollo.stereotomy.event.Seal; /** * @author hal.hildebrand - * */ public class InteractionEventImpl extends KeyEventImpl implements InteractionEvent { - private final com.salesfoce.apollo.stereotomy.event.proto.InteractionEvent event; + private final com.salesforce.apollo.stereotomy.event.proto.InteractionEvent event; - public InteractionEventImpl(com.salesfoce.apollo.stereotomy.event.proto.InteractionEvent event) { + public InteractionEventImpl(com.salesforce.apollo.stereotomy.event.proto.InteractionEvent event) { super(event.getSpecification().getHeader(), event.getCommon()); this.event = event; } diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/KeyEventImpl.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/KeyEventImpl.java index 86baaa14d2..5dee1af5eb 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/KeyEventImpl.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/KeyEventImpl.java @@ -13,8 +13,8 @@ import org.joou.ULong; import com.google.protobuf.ByteString; -import com.salesfoce.apollo.stereotomy.event.proto.EventCommon; -import com.salesfoce.apollo.stereotomy.event.proto.Header; +import 
com.salesforce.apollo.stereotomy.event.proto.EventCommon; +import com.salesforce.apollo.stereotomy.event.proto.Header; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.cryptography.JohnHancock; @@ -25,9 +25,8 @@ /** * Grpc implemention of abstract KeyEvent - * - * @author hal.hildebrand * + * @author hal.hildebrand */ abstract public class KeyEventImpl implements KeyEvent { diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/KeyStateImpl.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/KeyStateImpl.java index 7433cd0a96..76d71c757f 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/KeyStateImpl.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/KeyStateImpl.java @@ -20,7 +20,7 @@ import java.util.stream.Collectors; import com.google.protobuf.InvalidProtocolBufferException; -import com.salesfoce.apollo.stereotomy.event.proto.KeyState_; +import com.salesforce.apollo.stereotomy.event.proto.KeyState_; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.cryptography.SigningThreshold; @@ -36,13 +36,22 @@ /** * @author hal.hildebrand - * */ public class KeyStateImpl implements KeyState { + private final KeyState_ state; + + public KeyStateImpl(byte[] content) throws InvalidProtocolBufferException { + this.state = KeyState_.parseFrom(content); + } + + public KeyStateImpl(KeyState_ state) { + this.state = state; + } + public static KeyState initialState(InceptionEvent event, DigestAlgorithm digestAlgo) { - var delegatingPrefix = event instanceof DelegatedInceptionEvent ? ((DelegatedInceptionEvent) event).getDelegatingPrefix() - : null; + var delegatingPrefix = + event instanceof DelegatedInceptionEvent ? ((DelegatedInceptionEvent) event).getDelegatingPrefix() : null; return newKeyState(event.getIdentifier(), event.getSigningThreshold(), event.getKeys(), event.getNextKeysDigest().orElse(null), event.getWitnessThreshold(), event.getWitnesses(), @@ -50,46 +59,34 @@ public static KeyState initialState(InceptionEvent event, DigestAlgorithm digest digestAlgo.digest(event.getBytes())); } - public static KeyState newKeyState(Identifier identifier, - SigningThreshold signingThreshold, - List keys, Digest nextKeyConfiguration, int witnessThreshold, + public static KeyState newKeyState(Identifier identifier, SigningThreshold signingThreshold, List keys, + Digest nextKeyConfiguration, int witnessThreshold, List witnesses, Set configurationTraits, KeyEvent event, EstablishmentEvent lastEstablishmentEvent, Identifier delegatingPrefix, Digest digest) { final var builder = KeyState_.newBuilder(); return new KeyStateImpl(builder.addAllKeys(keys.stream().map(pk -> bs(pk)).collect(Collectors.toList())) - .setNextKeyConfigurationDigest(nextKeyConfiguration == null ? Digest.NONE.toDigeste() - : nextKeyConfiguration.toDigeste()) + .setNextKeyConfigurationDigest( + nextKeyConfiguration == null ? 
Digest.NONE.toDigeste() + : nextKeyConfiguration.toDigeste()) .setSigningThreshold(toSigningThreshold(signingThreshold)) - .addAllWitnesses(witnesses.stream() - .map(e -> e.toIdent()) - .collect(Collectors.toList())) + .addAllWitnesses( + witnesses.stream().map(e -> e.toIdent()).collect(Collectors.toList())) .setWitnessThreshold(witnessThreshold) .setDigest(digest.toDigeste()) - .addAllConfigurationTraits(configurationTraits.stream() - .map(e -> e.name()) - .collect(Collectors.toList())) + .addAllConfigurationTraits( + configurationTraits.stream().map(e -> e.name()).collect(Collectors.toList())) .setCoordinates(event.getCoordinates().toEventCoords()) - .setDelegatingIdentifier(delegatingPrefix == null ? Identifier.NONE_IDENT - : delegatingPrefix.toIdent()) + .setDelegatingIdentifier( + delegatingPrefix == null ? Identifier.NONE_IDENT : delegatingPrefix.toIdent()) - .setLastEstablishmentEvent(lastEstablishmentEvent.getCoordinates() - .toEventCoords()) + .setLastEstablishmentEvent( + lastEstablishmentEvent.getCoordinates().toEventCoords()) .setLastEvent(event.getPrevious().toEventCoords()) .build()); } - private final KeyState_ state; - - public KeyStateImpl(byte[] content) throws InvalidProtocolBufferException { - this.state = KeyState_.parseFrom(content); - } - - public KeyStateImpl(KeyState_ state) { - this.state = state; - } - @Override public Set configurationTraits() { return state.getConfigurationTraitsList() @@ -156,6 +153,11 @@ public SigningThreshold getSigningThreshold() { return ProtobufEventFactory.toSigningThreshold(state.getSigningThreshold()); } + @Override + public int getWitnessThreshold() { + return state.getWitnessThreshold(); + } + @Override public List getWitnesses() { return state.getWitnessesList() @@ -167,11 +169,6 @@ public List getWitnesses() { .collect(Collectors.toList()); } - @Override - public int getWitnessThreshold() { - return state.getWitnessThreshold(); - } - @Override public int hashCode() { return Objects.hash(state); diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/ProtobufEventFactory.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/ProtobufEventFactory.java index 352bb76e31..33e5ec3ef9 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/ProtobufEventFactory.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/ProtobufEventFactory.java @@ -16,18 +16,18 @@ import java.util.Optional; import java.util.stream.Collectors; -import com.salesfoce.apollo.stereotomy.event.proto.Establishment; -import com.salesfoce.apollo.stereotomy.event.proto.EventCommon; -import com.salesfoce.apollo.stereotomy.event.proto.Header; -import com.salesfoce.apollo.stereotomy.event.proto.Ident; -import com.salesfoce.apollo.stereotomy.event.proto.IdentifierSpec; -import com.salesfoce.apollo.stereotomy.event.proto.InteractionEvent; -import com.salesfoce.apollo.stereotomy.event.proto.InteractionSpec; -import com.salesfoce.apollo.stereotomy.event.proto.KeyEventWithAttachments; -import com.salesfoce.apollo.stereotomy.event.proto.KeyEvent_; -import com.salesfoce.apollo.stereotomy.event.proto.RotationSpec; -import com.salesfoce.apollo.stereotomy.event.proto.Version; -import com.salesfoce.apollo.stereotomy.event.proto.Weights; +import com.salesforce.apollo.stereotomy.event.proto.Establishment; +import com.salesforce.apollo.stereotomy.event.proto.EventCommon; +import com.salesforce.apollo.stereotomy.event.proto.Header; +import 
com.salesforce.apollo.stereotomy.event.proto.Ident; +import com.salesforce.apollo.stereotomy.event.proto.IdentifierSpec; +import com.salesforce.apollo.stereotomy.event.proto.InteractionEvent; +import com.salesforce.apollo.stereotomy.event.proto.InteractionSpec; +import com.salesforce.apollo.stereotomy.event.proto.KeyEventWithAttachments; +import com.salesforce.apollo.stereotomy.event.proto.KeyEvent_; +import com.salesforce.apollo.stereotomy.event.proto.RotationSpec; +import com.salesforce.apollo.stereotomy.event.proto.Version; +import com.salesforce.apollo.stereotomy.event.proto.Weights; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.cryptography.SigningThreshold; @@ -47,23 +47,22 @@ /** * @author hal.hildebrand - * */ public class ProtobufEventFactory implements EventFactory { public static final EventFactory INSTANCE = new ProtobufEventFactory(); - public static Digest digestOf(com.salesfoce.apollo.stereotomy.event.proto.AttachmentEvent event, + public static Digest digestOf(com.salesforce.apollo.stereotomy.event.proto.AttachmentEvent event, DigestAlgorithm algo) { return algo.digest(event.getCoordinates().getIdentifier().toByteString()); } - public static Digest digestOf(com.salesfoce.apollo.stereotomy.event.proto.InceptionEvent event, + public static Digest digestOf(com.salesforce.apollo.stereotomy.event.proto.InceptionEvent event, DigestAlgorithm algo) { return algo.digest(event.getIdentifier().toByteString()); } - public static Digest digestOf(com.salesfoce.apollo.stereotomy.event.proto.RotationEvent event, + public static Digest digestOf(com.salesforce.apollo.stereotomy.event.proto.RotationEvent event, DigestAlgorithm algo) { return algo.digest(event.getSpecification().getHeader().getIdentifier().toByteString()); } @@ -78,40 +77,40 @@ public static Digest digestOf(InteractionEvent event, DigestAlgorithm algo) { public static Digest digestOf(final KeyEvent_ event, DigestAlgorithm algo) { return switch (event.getEventCase()) { - case INCEPTION -> digestOf(event.getInception(), algo); - case INTERACTION -> digestOf(event.getInteraction(), algo); - case ROTATION -> digestOf(event.getRotation(), algo); - default -> null; + case INCEPTION -> digestOf(event.getInception(), algo); + case INTERACTION -> digestOf(event.getInteraction(), algo); + case ROTATION -> digestOf(event.getRotation(), algo); + default -> null; }; } public static Digest digestOf(final KeyEventWithAttachments event, DigestAlgorithm algo) { return switch (event.getEventCase()) { - case INCEPTION -> digestOf(event.getInception(), algo); - case INTERACTION -> digestOf(event.getInteraction(), algo); - case ROTATION -> digestOf(event.getRotation(), algo); - default -> null; + case INCEPTION -> digestOf(event.getInception(), algo); + case INTERACTION -> digestOf(event.getInteraction(), algo); + case ROTATION -> digestOf(event.getRotation(), algo); + default -> null; }; } public static KeyEvent from(KeyEvent_ ke) { return switch (ke.getEventCase()) { - case EVENT_NOT_SET -> null; - case INCEPTION -> ProtobufEventFactory.toKeyEvent(ke.getInception()); - case INTERACTION -> new InteractionEventImpl(ke.getInteraction()); - case ROTATION -> ProtobufEventFactory.toKeyEvent(ke.getRotation()); - default -> throw new IllegalArgumentException("Unexpected value: " + ke.getEventCase()); + case EVENT_NOT_SET -> null; + case INCEPTION -> ProtobufEventFactory.toKeyEvent(ke.getInception()); + case INTERACTION -> new 
InteractionEventImpl(ke.getInteraction()); + case ROTATION -> ProtobufEventFactory.toKeyEvent(ke.getRotation()); + default -> throw new IllegalArgumentException("Unexpected value: " + ke.getEventCase()); }; } public static EventWithAttachments from(KeyEventWithAttachments ke) { var event = switch (ke.getEventCase()) { - case EVENT_NOT_SET -> null; - case INCEPTION -> ProtobufEventFactory.toKeyEvent(ke.getInception()); - case INTERACTION -> new InteractionEventImpl(ke.getInteraction()); - case ROTATION -> ProtobufEventFactory.toKeyEvent(ke.getRotation()); - default -> throw new IllegalArgumentException("Unexpected value: " + ke.getEventCase()); + case EVENT_NOT_SET -> null; + case INCEPTION -> ProtobufEventFactory.toKeyEvent(ke.getInception()); + case INTERACTION -> new InteractionEventImpl(ke.getInteraction()); + case ROTATION -> ProtobufEventFactory.toKeyEvent(ke.getRotation()); + default -> throw new IllegalArgumentException("Unexpected value: " + ke.getEventCase()); }; return new EventWithAttachments(event, AttachmentEvent.Attachment.of(ke.getAttachment())); } @@ -119,37 +118,42 @@ public static EventWithAttachments from(KeyEventWithAttachments ke) { public static KeyEvent toKeyEvent(byte[] event, String ilk) { try { return switch (ilk) { - case ROTATION_TYPE -> new RotationEventImpl(com.salesfoce.apollo.stereotomy.event.proto.RotationEvent.parseFrom(event)); - case DELEGATED_INCEPTION_TYPE -> new DelegatedInceptionEventImpl(com.salesfoce.apollo.stereotomy.event.proto.InceptionEvent.parseFrom(event)); - case DELEGATED_ROTATION_TYPE -> new DelegatedRotationEventImpl(com.salesfoce.apollo.stereotomy.event.proto.RotationEvent.parseFrom(event)); - case INCEPTION_TYPE -> new InceptionEventImpl(com.salesfoce.apollo.stereotomy.event.proto.InceptionEvent.parseFrom(event)); - case INTERACTION_TYPE -> new InteractionEventImpl(InteractionEvent.parseFrom(event)); - default -> null; + case ROTATION_TYPE -> + new RotationEventImpl(com.salesforce.apollo.stereotomy.event.proto.RotationEvent.parseFrom(event)); + case DELEGATED_INCEPTION_TYPE -> new DelegatedInceptionEventImpl( + com.salesforce.apollo.stereotomy.event.proto.InceptionEvent.parseFrom(event)); + case DELEGATED_ROTATION_TYPE -> new DelegatedRotationEventImpl( + com.salesforce.apollo.stereotomy.event.proto.RotationEvent.parseFrom(event)); + case INCEPTION_TYPE -> + new InceptionEventImpl(com.salesforce.apollo.stereotomy.event.proto.InceptionEvent.parseFrom(event)); + case INTERACTION_TYPE -> new InteractionEventImpl(InteractionEvent.parseFrom(event)); + default -> null; }; } catch (Throwable e) { return null; } } - public static InceptionEvent toKeyEvent(com.salesfoce.apollo.stereotomy.event.proto.InceptionEvent inception) { + public static InceptionEvent toKeyEvent(com.salesforce.apollo.stereotomy.event.proto.InceptionEvent inception) { return switch (inception.getSpecification().getHeader().getIlk()) { - case INCEPTION_TYPE -> new InceptionEventImpl(inception); - case DELEGATED_INCEPTION_TYPE -> new DelegatedInceptionEventImpl(inception); - default -> throw new IllegalArgumentException("Unexpected value: " - + inception.getSpecification().getHeader().getIlk()); + case INCEPTION_TYPE -> new InceptionEventImpl(inception); + case DELEGATED_INCEPTION_TYPE -> new DelegatedInceptionEventImpl(inception); + default -> throw new IllegalArgumentException( + "Unexpected value: " + inception.getSpecification().getHeader().getIlk()); }; } - public static RotationEvent toKeyEvent(com.salesfoce.apollo.stereotomy.event.proto.RotationEvent rotation) { + 
public static RotationEvent toKeyEvent(com.salesforce.apollo.stereotomy.event.proto.RotationEvent rotation) { return switch (rotation.getSpecification().getHeader().getIlk()) { - case ROTATION_TYPE -> new RotationEventImpl(rotation); - case DELEGATED_ROTATION_TYPE -> new DelegatedRotationEventImpl(rotation); - default -> throw new IllegalArgumentException("Unexpected value: " - + rotation.getSpecification().getHeader().getIlk()); + case ROTATION_TYPE -> new RotationEventImpl(rotation); + case DELEGATED_ROTATION_TYPE -> new DelegatedRotationEventImpl(rotation); + default -> + throw new IllegalArgumentException("Unexpected value: " + rotation.getSpecification().getHeader().getIlk()); }; } - public static SigningThreshold toSigningThreshold(com.salesfoce.apollo.stereotomy.event.proto.SigningThreshold signingThreshold) { + public static SigningThreshold toSigningThreshold( + com.salesforce.apollo.stereotomy.event.proto.SigningThreshold signingThreshold) { if (signingThreshold.getWeightsCount() == 0) { return new SigningThreshold.Unweighted() { @Override @@ -176,8 +180,9 @@ public Weight[][] getWeights() { } } - public static com.salesfoce.apollo.stereotomy.event.proto.SigningThreshold.Builder toSigningThreshold(SigningThreshold signingThreshold) { - var builder = com.salesfoce.apollo.stereotomy.event.proto.SigningThreshold.newBuilder(); + public static com.salesforce.apollo.stereotomy.event.proto.SigningThreshold.Builder toSigningThreshold( + SigningThreshold signingThreshold) { + var builder = com.salesforce.apollo.stereotomy.event.proto.SigningThreshold.newBuilder(); if (signingThreshold instanceof SigningThreshold.Unweighted) { builder.setThreshold(((SigningThreshold.Unweighted) signingThreshold).getThreshold()); } else if (signingThreshold instanceof SigningThreshold.Weighted) { @@ -185,10 +190,10 @@ public static com.salesfoce.apollo.stereotomy.event.proto.SigningThreshold.Build for (Weight[] wa : weights) { Weights.Builder wb = Weights.newBuilder(); for (Weight w : wa) { - wb.addWeights(com.salesfoce.apollo.stereotomy.event.proto.Weight.newBuilder() - .setNumerator(w.numerator()) - .setDenominator(w.denominator() - .orElse(0))); + wb.addWeights(com.salesforce.apollo.stereotomy.event.proto.Weight.newBuilder() + .setNumerator(w.numerator()) + .setDenominator( + w.denominator().orElse(0))); } builder.addWeights(wb); } @@ -204,7 +209,7 @@ private static Weight[] weightArrayFrom(Weights w) { return weights; } - private static Weight weightFrom(com.salesfoce.apollo.stereotomy.event.proto.Weight w) { + private static Weight weightFrom(com.salesforce.apollo.stereotomy.event.proto.Weight w) { return new Weight() { @Override @@ -222,7 +227,7 @@ public int numerator() { @Override public AttachmentEvent attachment(EstablishmentEvent event, Attachment attachment) { - var builder = com.salesfoce.apollo.stereotomy.event.proto.AttachmentEvent.newBuilder(); + var builder = com.salesforce.apollo.stereotomy.event.proto.AttachmentEvent.newBuilder(); builder.setAttachment(attachment.toAttachemente()).setCoordinates(event.getCoordinates().toEventCoords()); return new AttachmentEventImpl(builder.build()); } @@ -239,7 +244,7 @@ public InceptionEvent inception(E i var common = EventCommon.newBuilder().setAuthentication(specification.getSigner().sign(bs).toSig()); - var builder = com.salesfoce.apollo.stereotomy.event.proto.InceptionEvent.newBuilder(); + var builder = com.salesforce.apollo.stereotomy.event.proto.InceptionEvent.newBuilder(); if (delegated) { builder.setDelegatingPrefix(identifier.toIdent()); } @@ 
-258,7 +263,7 @@ public KeyEvent interaction(InteractionSpecification specification) { var common = EventCommon.newBuilder() .setPrevious(specification.getPrevious().toEventCoords()) .setAuthentication(signatures); - var builder = com.salesfoce.apollo.stereotomy.event.proto.InteractionEvent.newBuilder(); + var builder = com.salesforce.apollo.stereotomy.event.proto.InteractionEvent.newBuilder(); return new InteractionEventImpl(builder.setSpecification(ispec).setCommon(common).build()); } @@ -272,12 +277,13 @@ public RotationEvent rotation(RotationSpecification specification, boolean deleg var common = EventCommon.newBuilder() .setPrevious(specification.getPrevious().toEventCoords()) .setAuthentication(signatures); - var builder = com.salesfoce.apollo.stereotomy.event.proto.RotationEvent.newBuilder(); + var builder = com.salesforce.apollo.stereotomy.event.proto.RotationEvent.newBuilder(); var event = builder.setSpecification(rotationSpec).setCommon(common).build(); return delegated ? new DelegatedRotationEventImpl(event) : new RotationEventImpl(event); } - com.salesfoce.apollo.stereotomy.event.proto.Version.Builder toVersion(com.salesforce.apollo.stereotomy.event.Version version) { + com.salesforce.apollo.stereotomy.event.proto.Version.Builder toVersion( + com.salesforce.apollo.stereotomy.event.Version version) { return Version.newBuilder().setMajor(version.getMajor()).setMinor(version.getMinor()); } @@ -286,10 +292,8 @@ private IdentifierSpec identifierSpec(Identifier identifi boolean delegated) { var establishment = Establishment.newBuilder() .setSigningThreshold(toSigningThreshold(specification.getSigningThreshold())) - .addAllKeys(specification.getKeys() - .stream() - .map(k -> bs(k)) - .collect(Collectors.toList())) + .addAllKeys( + specification.getKeys().stream().map(k -> bs(k)).collect(Collectors.toList())) .setNextKeysDigest((specification.getNextKeys() == null ? Digest.NONE : specification.getNextKeys()).toDigeste()) .setWitnessThreshold(specification.getWitnessThreshold()); @@ -333,10 +337,8 @@ private InteractionSpec interactionSpec(InteractionSpecification specification) private RotationSpec rotationSpec(Identifier identifier, RotationSpecification specification, boolean delegated) { var establishment = Establishment.newBuilder() .setSigningThreshold(toSigningThreshold(specification.getSigningThreshold())) - .addAllKeys(specification.getKeys() - .stream() - .map(k -> bs(k)) - .collect(Collectors.toList())) + .addAllKeys( + specification.getKeys().stream().map(k -> bs(k)).collect(Collectors.toList())) .setNextKeysDigest((specification.getNextKeys() == null ? 
Digest.NONE : specification.getNextKeys()).toDigeste()) .setWitnessThreshold(specification.getWitnessThreshold()); diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/RotationEventImpl.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/RotationEventImpl.java index 94dfb24184..fc5d535904 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/RotationEventImpl.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/event/protobuf/RotationEventImpl.java @@ -13,21 +13,20 @@ import java.util.stream.Collectors; import com.google.protobuf.ByteString; -import com.salesfoce.apollo.stereotomy.event.proto.KeyEventWithAttachments.Builder; -import com.salesfoce.apollo.stereotomy.event.proto.KeyEvent_; +import com.salesforce.apollo.stereotomy.event.proto.KeyEventWithAttachments.Builder; +import com.salesforce.apollo.stereotomy.event.proto.KeyEvent_; import com.salesforce.apollo.stereotomy.event.RotationEvent; import com.salesforce.apollo.stereotomy.event.Seal; import com.salesforce.apollo.stereotomy.identifier.BasicIdentifier; /** * @author hal.hildebrand - * */ public class RotationEventImpl extends EstablishmentEventImpl implements RotationEvent { - final com.salesfoce.apollo.stereotomy.event.proto.RotationEvent event; + final com.salesforce.apollo.stereotomy.event.proto.RotationEvent event; - public RotationEventImpl(com.salesfoce.apollo.stereotomy.event.proto.RotationEvent event) { + public RotationEventImpl(com.salesforce.apollo.stereotomy.event.proto.RotationEvent event) { super(event.getSpecification().getHeader(), event.getCommon(), event.getSpecification().getEstablishment()); this.event = event; } @@ -84,7 +83,7 @@ public KeyEvent_ toKeyEvent_() { return KeyEvent_.newBuilder().setRotation(event).build(); } - public com.salesfoce.apollo.stereotomy.event.proto.RotationEvent toRotationEvent_() { + public com.salesforce.apollo.stereotomy.event.proto.RotationEvent toRotationEvent_() { return event; } diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/BasicIdentifier.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/BasicIdentifier.java index f4c0bd6330..911b182d02 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/BasicIdentifier.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/BasicIdentifier.java @@ -14,15 +14,14 @@ import java.security.PublicKey; import java.util.Objects; -import com.salesfoce.apollo.stereotomy.event.proto.Ident; -import com.salesfoce.apollo.cryptography.proto.PubKey; +import com.salesforce.apollo.stereotomy.event.proto.Ident; +import com.salesforce.apollo.cryptography.proto.PubKey; import com.salesforce.apollo.cryptography.JohnHancock; import com.salesforce.apollo.cryptography.SigningThreshold; import com.salesforce.apollo.cryptography.Verifier; /** * @author hal.hildebrand - * */ public class BasicIdentifier implements Identifier, Verifier { private final PublicKey publicKey; diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/Identifier.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/Identifier.java index 6567c1d707..dc29d4c6bb 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/Identifier.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/Identifier.java @@ -13,7 +13,7 @@ import java.security.PublicKey; import 
com.google.protobuf.ByteString; -import com.salesfoce.apollo.stereotomy.event.proto.Ident; +import com.salesforce.apollo.stereotomy.event.proto.Ident; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.cryptography.Signer; @@ -21,38 +21,37 @@ /** * @author hal.hildebrand - * */ public interface Identifier { - final ByteString EMPTY = ByteString.copyFrom(new byte[] { 0 }); - Identifier NONE = new Identifier() { - - @Override - public byte identifierCode() { - return 0; - } - - @Override - public boolean isNone() { - return true; - } - - @Override - public boolean isTransferable() { - return false; - } - - @Override - public Ident toIdent() { - return NONE_IDENT; - } - - @Override - public String toString() { - return ""; - } - }; + final ByteString EMPTY = ByteString.copyFrom(new byte[] { 0 }); public static final Ident NONE_IDENT = Ident.newBuilder().setNONE(true).build(); + Identifier NONE = new Identifier() { + + @Override + public byte identifierCode() { + return 0; + } + + @Override + public boolean isNone() { + return true; + } + + @Override + public boolean isTransferable() { + return false; + } + + @Override + public Ident toIdent() { + return NONE_IDENT; + } + + @Override + public String toString() { + return ""; + } + }; public static Identifier from(Ident identifier) { if (identifier.hasBasic()) { diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/QualifiedBase64Identifier.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/QualifiedBase64Identifier.java index 223b5c475c..54b3d67ae7 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/QualifiedBase64Identifier.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/QualifiedBase64Identifier.java @@ -6,7 +6,7 @@ */ package com.salesforce.apollo.stereotomy.identifier; -import com.salesfoce.apollo.stereotomy.event.proto.Ident; +import com.salesforce.apollo.stereotomy.event.proto.Ident; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.cryptography.QualifiedBase64; diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/SelfAddressingIdentifier.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/SelfAddressingIdentifier.java index 999a39cfd0..b69689d90a 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/SelfAddressingIdentifier.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/SelfAddressingIdentifier.java @@ -8,13 +8,12 @@ import java.util.Objects; -import com.salesfoce.apollo.stereotomy.event.proto.Ident; +import com.salesforce.apollo.stereotomy.event.proto.Ident; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; /** * @author hal.hildebrand - * */ public class SelfAddressingIdentifier implements Identifier, Comparable { diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/SelfSigningIdentifier.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/SelfSigningIdentifier.java index a0a345946a..1ee71da8e3 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/SelfSigningIdentifier.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/SelfSigningIdentifier.java @@ -8,12 +8,11 @@ import 
java.util.Objects; -import com.salesfoce.apollo.stereotomy.event.proto.Ident; +import com.salesforce.apollo.stereotomy.event.proto.Ident; import com.salesforce.apollo.cryptography.JohnHancock; /** * @author hal.hildebrand - * */ public class SelfSigningIdentifier implements Identifier { diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/spec/IdentifierSpecification.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/spec/IdentifierSpecification.java index a5f4828f6f..f3be7a8175 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/spec/IdentifierSpecification.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/spec/IdentifierSpecification.java @@ -17,7 +17,7 @@ import java.util.Set; import java.util.stream.Stream; -import com.salesfoce.apollo.stereotomy.event.proto.IdentifierSpec; +import com.salesforce.apollo.stereotomy.event.proto.IdentifierSpec; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.cryptography.SignatureAlgorithm; @@ -33,32 +33,145 @@ /** * @author hal.hildebrand - * */ public class IdentifierSpecification { - public static class Builder implements Cloneable { + private final Set configurationTraits; + private final Class derivation; + private final DigestAlgorithm identifierDigestAlgorithm; + private final List keys; + private final Digest nextKeys; + private final DigestAlgorithm selfAddressingDigestAlgorithm; + private final SignatureAlgorithm signatureAlgorithm; + private final Signer signer; + private final SigningThreshold signingThreshold; + private final Version version; + private final List witnesses; + private final int witnessThreshold; + private IdentifierSpecification(Class derivation, DigestAlgorithm identifierDigestAlgorithm, + SigningThreshold signingThreshold, List keys, Signer signer, + Digest nextKeys, int witnessThreshold, List witnesses, + Set configurationTraits, Version version, + DigestAlgorithm selfAddressingDigestAlgorithm, + SignatureAlgorithm signatureAlgorithm) { + this.derivation = derivation; + this.identifierDigestAlgorithm = identifierDigestAlgorithm; + this.signingThreshold = signingThreshold; + this.keys = List.copyOf(keys); + this.signer = signer; + this.nextKeys = nextKeys; + this.witnessThreshold = witnessThreshold; + this.witnesses = List.copyOf(witnesses); + this.configurationTraits = Set.copyOf(configurationTraits); + this.version = version; + this.selfAddressingDigestAlgorithm = selfAddressingDigestAlgorithm; + this.signatureAlgorithm = signatureAlgorithm; + } - public static Builder from(IdentifierSpec parseFrom) { - return new Builder(); + public static BasicIdentifier basic(PublicKey key) { + return new BasicIdentifier(key); + } + + public static D identifier(IdentifierSpecification spec, byte[] inceptionStatement) { + return Identifier.identifier(spec, ByteBuffer.wrap(inceptionStatement)); + } + + public static Builder newBuilder() { + return new Builder(); + } + + public static SelfAddressingIdentifier selfAddressing(byte[] inceptionStatement, DigestAlgorithm digestAlgorithm) { + return new SelfAddressingIdentifier(digestAlgorithm.digest(inceptionStatement)); + } + + public static SelfSigningIdentifier selfSigning(byte[] inceptionStatement, Signer signer) { + var signature = signer.sign(inceptionStatement); + return new SelfSigningIdentifier(signature); + } + + @Override + public Builder clone() { + Object clone; + try { + clone = 
super.clone(); + } catch (CloneNotSupportedException e) { + throw new IllegalStateException("Clone not supported", e); } + @SuppressWarnings("unchecked") + final var cast = (Builder) clone; + return cast; + } + + public Set getConfigurationTraits() { + return configurationTraits; + } + + public Class getDerivation() { + return derivation; + } + + public DigestAlgorithm getIdentifierDigestAlgorithm() { + return identifierDigestAlgorithm; + } + + public List getKeys() { + return keys; + } + + public Digest getNextKeys() { + return nextKeys; + } + + public DigestAlgorithm getSelfAddressingDigestAlgorithm() { + return selfAddressingDigestAlgorithm; + } + + public SignatureAlgorithm getSignatureAlgorithm() { + return signatureAlgorithm; + } + + public Signer getSigner() { + return signer; + } + + public SigningThreshold getSigningThreshold() { + return signingThreshold; + } + + public Version getVersion() { + return version; + } + + public int getWitnessThreshold() { + return witnessThreshold; + } + + public List getWitnesses() { + return witnesses; + } + + public static class Builder implements Cloneable { - private final EnumSet configurationTraits = EnumSet.noneOf(ConfigurationTrait.class); - private Class derivation = SelfAddressingIdentifier.class; - private DigestAlgorithm identifierDigestAlgorithm = DigestAlgorithm.BLAKE3_256; + private final EnumSet configurationTraits = EnumSet.noneOf( + ConfigurationTrait.class); private final List keys = new ArrayList<>(); private final List nextKeys = new ArrayList<>(); private final DigestAlgorithm nextKeysAlgorithm = DigestAlgorithm.BLAKE3_256; - private SigningThreshold nextSigningThreshold; - private DigestAlgorithm selfAddressingDigestAlgorithm = DigestAlgorithm.DEFAULT; - private SignatureAlgorithm signatureAlgorithm = SignatureAlgorithm.DEFAULT; - private Signer signer; - private SigningThreshold signingThreshold; - private Version version = Stereotomy.currentVersion(); private final List witnesses = new ArrayList<>(); - + private Class derivation = SelfAddressingIdentifier.class; + private DigestAlgorithm identifierDigestAlgorithm = DigestAlgorithm.BLAKE3_256; + private SigningThreshold nextSigningThreshold; + private DigestAlgorithm selfAddressingDigestAlgorithm = DigestAlgorithm.DEFAULT; + private SignatureAlgorithm signatureAlgorithm = SignatureAlgorithm.DEFAULT; + private Signer signer; + private SigningThreshold signingThreshold; + private Version version = Stereotomy.currentVersion(); private int witnessThreshold = 0; + public static Builder from(IdentifierSpec parseFrom) { + return new Builder(); + } + public Builder addKey(PublicKey key) { keys.add(requireNonNull(key)); return this; @@ -85,15 +198,17 @@ public IdentifierSpecification build() { if (signingThreshold instanceof SigningThreshold.Unweighted) { var unw = (SigningThreshold.Unweighted) signingThreshold; if (unw.getThreshold() > keys.size()) { - throw new IllegalArgumentException("Invalid unweighted signing threshold:" + " keys: " + keys.size() - + " threshold: " + unw.getThreshold()); + throw new IllegalArgumentException( + "Invalid unweighted signing threshold:" + " keys: " + keys.size() + " threshold: " + + unw.getThreshold()); } } else if (signingThreshold instanceof SigningThreshold.Weighted) { var w = (SigningThreshold.Weighted) signingThreshold; var countOfWeights = Stream.of(w.getWeights()).mapToLong(wts -> wts.length).sum(); if (countOfWeights != keys.size()) { - throw new IllegalArgumentException("Count of weights and count of keys are not equal: " + " keys: " - + 
keys.size() + " weights: " + countOfWeights); + throw new IllegalArgumentException( + "Count of weights and count of keys are not equal: " + " keys: " + keys.size() + " weights: " + + countOfWeights); } } else { throw new IllegalArgumentException("Unknown SigningThreshold type: " + signingThreshold.getClass()); @@ -110,15 +225,17 @@ public IdentifierSpecification build() { } else if (nextSigningThreshold instanceof SigningThreshold.Unweighted) { var unw = (SigningThreshold.Unweighted) nextSigningThreshold; if (unw.getThreshold() > keys.size()) { - throw new IllegalArgumentException("Invalid unweighted signing threshold:" + " keys: " + keys.size() - + " threshold: " + unw.getThreshold()); + throw new IllegalArgumentException( + "Invalid unweighted signing threshold:" + " keys: " + keys.size() + " threshold: " + + unw.getThreshold()); } } else if (nextSigningThreshold instanceof SigningThreshold.Weighted) { var w = (SigningThreshold.Weighted) nextSigningThreshold; var countOfWeights = Stream.of(w.getWeights()).mapToLong(wts -> wts.length).sum(); if (countOfWeights != keys.size()) { - throw new IllegalArgumentException("Count of weights and count of keys are not equal: " + " keys: " - + keys.size() + " weights: " + countOfWeights); + throw new IllegalArgumentException( + "Count of weights and count of keys are not equal: " + " keys: " + keys.size() + " weights: " + + countOfWeights); } } else { throw new IllegalArgumentException("Unknown SigningThreshold type: " + nextSigningThreshold.getClass()); @@ -138,8 +255,8 @@ public IdentifierSpecification build() { } if (!witnesses.isEmpty() && ((witnessThreshold < 1) || (witnessThreshold > witnesses.size()))) { - throw new RuntimeException("Invalid witness threshold:" + " witnesses: " + witnesses.size() - + " threshold: " + witnessThreshold); + throw new RuntimeException( + "Invalid witness threshold:" + " witnesses: " + witnesses.size() + " threshold: " + witnessThreshold); } // TODO test duplicate detection--need to write equals() hashcode for classes @@ -170,77 +287,17 @@ public EnumSet getConfigurationTraits() { return configurationTraits; } - public Class getDerivation() { - return derivation; - } - - public DigestAlgorithm getIdentifierDigestAlgorithm() { - return identifierDigestAlgorithm; - } - - public List getKeys() { - return keys; - } - - public List getNextKeys() { - return nextKeys; - } - - public DigestAlgorithm getNextKeysAlgorithm() { - return nextKeysAlgorithm; - } - - public SigningThreshold getNextSigningThreshold() { - return nextSigningThreshold; - } - - public DigestAlgorithm getSelfAddressingDigestAlgorithm() { - return selfAddressingDigestAlgorithm; - } - - public SignatureAlgorithm getSignatureAlgorithm() { - return signatureAlgorithm; - } - - public Signer getSigner() { - return signer; - } - - public SigningThreshold getSigningThreshold() { - return signingThreshold; - } - - public Version getVersion() { - return version; - } - - public List getWitnesses() { - return witnesses; - } - - public int getWitnessThreshold() { - return witnessThreshold; - } - - @SuppressWarnings("unchecked") - public Builder setBasic() { - derivation = BasicIdentifier.class; - return (Builder) this; - } - public Builder setConfigurationTraits(ConfigurationTrait... 
configurationTraits) { Collections.addAll(this.configurationTraits, configurationTraits); return this; } - public Builder setDoNotDelegate() { - configurationTraits.add(ConfigurationTrait.DO_NOT_DELEGATE); - return this; + public Class getDerivation() { + return derivation; } - public Builder setEstablishmentEventsOnly() { - configurationTraits.add(ConfigurationTrait.ESTABLISHMENT_EVENTS_ONLY); - return this; + public DigestAlgorithm getIdentifierDigestAlgorithm() { + return identifierDigestAlgorithm; } public Builder setIdentifierDigestAlgorithm(DigestAlgorithm algorithm) { @@ -248,6 +305,10 @@ public Builder setIdentifierDigestAlgorithm(DigestAlgorithm algorithm) { return this; } + public List getKeys() { + return keys; + } + public Builder setKeys(List keys) { requireNonNull(keys); @@ -259,12 +320,24 @@ public Builder setKeys(List keys) { return this; } + public List getNextKeys() { + return nextKeys; + } + public Builder setNextKeys(List nextKeys) { this.nextKeys.clear(); this.nextKeys.addAll(nextKeys); return this; } + public DigestAlgorithm getNextKeysAlgorithm() { + return nextKeysAlgorithm; + } + + public SigningThreshold getNextSigningThreshold() { + return nextSigningThreshold; + } + public Builder setNextSigningThreshold(int nextSigningThreshold) { if (nextSigningThreshold < 1) { throw new IllegalArgumentException("nextSigningThreshold must be 1 or greater"); @@ -280,21 +353,19 @@ public Builder setNextSigningThreshold(SigningThreshold nextSigningThreshold) return this; } - public Builder setSelfAddressing() { - derivation = SelfAddressingIdentifier.class; - return this; + public DigestAlgorithm getSelfAddressingDigestAlgorithm() { + return selfAddressingDigestAlgorithm; } @SuppressWarnings("unchecked") - public Builder setSelfAddressingDigestAlgorithm(DigestAlgorithm selfAddressingDigestAlgorithm) { + public Builder setSelfAddressingDigestAlgorithm( + DigestAlgorithm selfAddressingDigestAlgorithm) { this.selfAddressingDigestAlgorithm = selfAddressingDigestAlgorithm; return (Builder) this; } - @SuppressWarnings("unchecked") - public Builder setSelfSigning() { - derivation = SelfSigningIdentifier.class; - return (Builder) this; + public SignatureAlgorithm getSignatureAlgorithm() { + return signatureAlgorithm; } public Builder setSignatureAlgorithm(SignatureAlgorithm signatureAlgorithm) { @@ -302,11 +373,19 @@ public Builder setSignatureAlgorithm(SignatureAlgorithm signatureAlgorithm) { return this; } + public Signer getSigner() { + return signer; + } + public Builder setSigner(Signer signer) { this.signer = signer; return this; } + public SigningThreshold getSigningThreshold() { + return signingThreshold; + } + public Builder setSigningThreshold(int signingThreshold) { if (signingThreshold < 1) { throw new IllegalArgumentException("signingThreshold must be 1 or greater"); @@ -321,19 +400,17 @@ public Builder setSigningThreshold(SigningThreshold signingThreshold) { return this; } - public Builder setVersion(Version version) { - this.version = version; - return this; + public Version getVersion() { + return version; } - public Builder setWitness(BasicIdentifier witness) { - witnesses.add(requireNonNull(witness)); + public Builder setVersion(Version version) { + this.version = version; return this; } - public Builder setWitnesses(List witnesses) { - witnesses.addAll(requireNonNull(witnesses)); - return this; + public int getWitnessThreshold() { + return witnessThreshold; } public Builder setWitnessThreshold(int witnessThreshold) { @@ -345,124 +422,50 @@ public Builder 
setWitnessThreshold(int witnessThreshold) { return this; } - public IdentifierSpec toSpec() { - return IdentifierSpec.newBuilder().build(); + public List getWitnesses() { + return witnesses; } - } - - public static BasicIdentifier basic(PublicKey key) { - return new BasicIdentifier(key); - } - - public static D identifier(IdentifierSpecification spec, byte[] inceptionStatement) { - return Identifier.identifier(spec, ByteBuffer.wrap(inceptionStatement)); - } - - public static Builder newBuilder() { - return new Builder(); - } - - public static SelfAddressingIdentifier selfAddressing(byte[] inceptionStatement, DigestAlgorithm digestAlgorithm) { - return new SelfAddressingIdentifier(digestAlgorithm.digest(inceptionStatement)); - } - - public static SelfSigningIdentifier selfSigning(byte[] inceptionStatement, Signer signer) { - var signature = signer.sign(inceptionStatement); - return new SelfSigningIdentifier(signature); - } - - private final Set configurationTraits; - private final Class derivation; - private final DigestAlgorithm identifierDigestAlgorithm; - private final List keys; - private final Digest nextKeys; - private final DigestAlgorithm selfAddressingDigestAlgorithm; - private final SignatureAlgorithm signatureAlgorithm; - private final Signer signer; - private final SigningThreshold signingThreshold; - private final Version version; - private final List witnesses; - private final int witnessThreshold; - - private IdentifierSpecification(Class derivation, DigestAlgorithm identifierDigestAlgorithm, - SigningThreshold signingThreshold, List keys, Signer signer, - Digest nextKeys, int witnessThreshold, List witnesses, - Set configurationTraits, Version version, - DigestAlgorithm selfAddressingDigestAlgorithm, - SignatureAlgorithm signatureAlgorithm) { - this.derivation = derivation; - this.identifierDigestAlgorithm = identifierDigestAlgorithm; - this.signingThreshold = signingThreshold; - this.keys = List.copyOf(keys); - this.signer = signer; - this.nextKeys = nextKeys; - this.witnessThreshold = witnessThreshold; - this.witnesses = List.copyOf(witnesses); - this.configurationTraits = Set.copyOf(configurationTraits); - this.version = version; - this.selfAddressingDigestAlgorithm = selfAddressingDigestAlgorithm; - this.signatureAlgorithm = signatureAlgorithm; - } - @Override - public Builder clone() { - Object clone; - try { - clone = super.clone(); - } catch (CloneNotSupportedException e) { - throw new IllegalStateException("Clone not supported", e); + public Builder setWitnesses(List witnesses) { + witnesses.addAll(requireNonNull(witnesses)); + return this; } - @SuppressWarnings("unchecked") - final var cast = (Builder) clone; - return cast; - } - - public Set getConfigurationTraits() { - return configurationTraits; - } - - public Class getDerivation() { - return derivation; - } - - public DigestAlgorithm getIdentifierDigestAlgorithm() { - return identifierDigestAlgorithm; - } - - public List getKeys() { - return keys; - } - public Digest getNextKeys() { - return nextKeys; - } - - public DigestAlgorithm getSelfAddressingDigestAlgorithm() { - return selfAddressingDigestAlgorithm; - } + @SuppressWarnings("unchecked") + public Builder setBasic() { + derivation = BasicIdentifier.class; + return (Builder) this; + } - public SignatureAlgorithm getSignatureAlgorithm() { - return signatureAlgorithm; - } + public Builder setDoNotDelegate() { + configurationTraits.add(ConfigurationTrait.DO_NOT_DELEGATE); + return this; + } - public Signer getSigner() { - return signer; - } + public Builder 
setEstablishmentEventsOnly() { + configurationTraits.add(ConfigurationTrait.ESTABLISHMENT_EVENTS_ONLY); + return this; + } - public SigningThreshold getSigningThreshold() { - return signingThreshold; - } + public Builder setSelfAddressing() { + derivation = SelfAddressingIdentifier.class; + return this; + } - public Version getVersion() { - return version; - } + @SuppressWarnings("unchecked") + public Builder setSelfSigning() { + derivation = SelfSigningIdentifier.class; + return (Builder) this; + } - public List getWitnesses() { - return witnesses; - } + public Builder setWitness(BasicIdentifier witness) { + witnesses.add(requireNonNull(witness)); + return this; + } - public int getWitnessThreshold() { - return witnessThreshold; + public IdentifierSpec toSpec() { + return IdentifierSpec.newBuilder().build(); + } } } diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/spec/KeyConfigurationDigester.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/spec/KeyConfigurationDigester.java index f91179a63f..23dc386556 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/spec/KeyConfigurationDigester.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/spec/KeyConfigurationDigester.java @@ -6,30 +6,29 @@ */ package com.salesforce.apollo.stereotomy.identifier.spec; -import static com.salesforce.apollo.cryptography.QualifiedBase64.bs; -import static java.nio.charset.StandardCharsets.UTF_8; -import static java.util.stream.Collectors.joining; -import static java.util.stream.Collectors.toList; - -import java.security.PublicKey; -import java.util.List; -import java.util.stream.Stream; - import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.cryptography.SigningThreshold; import com.salesforce.apollo.cryptography.SigningThreshold.Weighted.Weight; import com.salesforce.apollo.utils.Hex; +import java.security.PublicKey; +import java.util.List; +import java.util.stream.Stream; + +import static com.salesforce.apollo.cryptography.QualifiedBase64.bs; +import static java.nio.charset.StandardCharsets.UTF_8; +import static java.util.stream.Collectors.joining; +import static java.util.stream.Collectors.toList; + /** * @author hal.hildebrand - * */ public class KeyConfigurationDigester { public static Digest digest(SigningThreshold signingThreshold, List nextKeyDigests) { var st = signingThresholdRepresentation(signingThreshold); - var digestAlgorithm = nextKeyDigests.get(0).getAlgorithm(); + var digestAlgorithm = nextKeyDigests.getFirst().getAlgorithm(); var digest = digestAlgorithm.digest(st);// digest diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/spec/RotationSpecification.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/spec/RotationSpecification.java index 1cc1da4453..0b0d809796 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/spec/RotationSpecification.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/identifier/spec/RotationSpecification.java @@ -16,7 +16,7 @@ import org.joou.ULong; -import com.salesfoce.apollo.stereotomy.event.proto.RotationSpec; +import com.salesforce.apollo.stereotomy.event.proto.RotationSpec; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.cryptography.SignatureAlgorithm; @@ -31,43 +31,125 @@ /** 
* @author hal.hildebrand - * */ public class RotationSpecification { - public static class Builder implements Cloneable { - public static Builder from(RotationSpec rotationSpec) { - return new Builder(); - } + private final List addedWitnesses; + private final Identifier identifier; + private final List keys; + private final Digest nextKeys; + private final EventCoordinates previous; + private final Digest priorEventDigest; + private final List removedWitnesses; + private final List seals; + private final ULong sequenceNumber; + private final Signer signer; + private final SigningThreshold signingThreshold; + private final Version version; + private final int witnessThreshold; + public RotationSpecification(Identifier identifier, ULong uLong, EventCoordinates previousEvent, + SigningThreshold signingThreshold, List keys, Signer signer, + Digest nextKeys, int witnessThreshold, List removedWitnesses, + List addedWitnesses, List seals, Version version, + Digest priorEventDigest) { + this.identifier = identifier; + this.sequenceNumber = uLong; + this.previous = previousEvent; + this.signingThreshold = signingThreshold; + this.keys = List.copyOf(keys); + this.signer = signer; + this.nextKeys = nextKeys; + this.witnessThreshold = witnessThreshold; + this.addedWitnesses = List.copyOf(addedWitnesses); + this.removedWitnesses = List.copyOf(removedWitnesses); + this.seals = List.copyOf(seals); + this.version = version; + this.priorEventDigest = priorEventDigest; + } + + public static Builder newBuilder() { + return new Builder(); + } + + public List getAddedWitnesses() { + return addedWitnesses; + } + + public Identifier getIdentifier() { + return identifier; + } + + public List getKeys() { + return keys; + } + + public Digest getNextKeys() { + return nextKeys; + } + + public EventCoordinates getPrevious() { + return previous; + } + + public Digest getPriorEventDigest() { + return priorEventDigest; + } + + public List getRemovedWitnesses() { + return removedWitnesses; + } + + public List getSeals() { + return seals; + } + + public ULong getSequenceNumber() { + return sequenceNumber; + } + + public Signer getSigner() { + return signer; + } + + public SigningThreshold getSigningThreshold() { + return signingThreshold; + } + + public Version getVersion() { + return version; + } - private EventCoordinates currentCoords; - private Digest currentDigest; + public int getWitnessThreshold() { + return witnessThreshold; + } + + public static class Builder implements Cloneable { private final List currentWitnesses = new ArrayList<>(); - private DigestAlgorithm digestAlgorithm = DigestAlgorithm.DEFAULT; - private Identifier identifier; private final List keys = new ArrayList<>(); private final List nextKeys = new ArrayList<>(); private final DigestAlgorithm nextKeysAlgorithm = DigestAlgorithm.BLAKE3_256; - private SigningThreshold nextSigningThreshold; private final List seals = new ArrayList<>(); - private SignatureAlgorithm signatureAlgorithm = SignatureAlgorithm.DEFAULT; - private Signer signer; - private SigningThreshold signingThreshold; - private Version version = Stereotomy.currentVersion(); private final List witnesses = new ArrayList<>(); - + private EventCoordinates currentCoords; + private Digest currentDigest; + private DigestAlgorithm digestAlgorithm = DigestAlgorithm.DEFAULT; + private Identifier identifier; + private SigningThreshold nextSigningThreshold; + private SignatureAlgorithm signatureAlgorithm = SignatureAlgorithm.DEFAULT; + private Signer signer; + private SigningThreshold 
signingThreshold; + private Version version = Stereotomy.currentVersion(); private int witnessThreshold = 0; public Builder() { } - public Builder addAllSeals(List seals) { - this.seals.addAll(requireNonNull(seals)); - return this; + public static Builder from(RotationSpec rotationSpec) { + return new Builder(); } - public Builder addddWitness(BasicIdentifier prefix) { - witnesses.add(requireNonNull(prefix)); + public Builder addAllSeals(List seals) { + this.seals.addAll(requireNonNull(seals)); return this; } @@ -86,6 +168,11 @@ public Builder addWitnesses(List prefixes) { return this; } + public Builder addddWitness(BasicIdentifier prefix) { + witnesses.add(requireNonNull(prefix)); + return this; + } + public RotationSpecification build() { // --- KEYS --- @@ -101,15 +188,17 @@ public RotationSpecification build() { if (signingThreshold instanceof SigningThreshold.Unweighted) { var unw = (SigningThreshold.Unweighted) signingThreshold; if (unw.getThreshold() > keys.size()) { - throw new IllegalArgumentException("Invalid unweighted signing threshold:" + " keys: " + keys.size() - + " threshold: " + unw.getThreshold()); + throw new IllegalArgumentException( + "Invalid unweighted signing threshold:" + " keys: " + keys.size() + " threshold: " + + unw.getThreshold()); } } else if (signingThreshold instanceof SigningThreshold.Weighted) { var w = (SigningThreshold.Weighted) signingThreshold; var countOfWeights = Stream.of(w.getWeights()).mapToLong(wts -> wts.length).sum(); if (countOfWeights != keys.size()) { - throw new IllegalArgumentException("Count of weights and count of keys are not equal: " + " keys: " - + keys.size() + " weights: " + countOfWeights); + throw new IllegalArgumentException( + "Count of weights and count of keys are not equal: " + " keys: " + keys.size() + " weights: " + + countOfWeights); } } else { throw new IllegalArgumentException("Unknown SigningThreshold type: " + signingThreshold.getClass()); @@ -123,22 +212,25 @@ public RotationSpecification build() { } else if (nextSigningThreshold instanceof SigningThreshold.Unweighted) { var unw = (SigningThreshold.Unweighted) nextSigningThreshold; if (unw.getThreshold() > keys.size()) { - throw new IllegalArgumentException("Invalid unweighted signing threshold:" + " keys: " + keys.size() - + " threshold: " + unw.getThreshold()); + throw new IllegalArgumentException( + "Invalid unweighted signing threshold:" + " keys: " + keys.size() + " threshold: " + + unw.getThreshold()); } } else if (nextSigningThreshold instanceof SigningThreshold.Weighted) { var w = (SigningThreshold.Weighted) nextSigningThreshold; var countOfWeights = Stream.of(w.getWeights()).mapToLong(wts -> wts.length).sum(); if (countOfWeights != keys.size()) { - throw new IllegalArgumentException("Count of weights and count of keys are not equal: " + " keys: " - + keys.size() + " weights: " + countOfWeights); + throw new IllegalArgumentException( + "Count of weights and count of keys are not equal: " + " keys: " + keys.size() + " weights: " + + countOfWeights); } } else { throw new IllegalArgumentException("Unknown SigningThreshold type: " + nextSigningThreshold.getClass()); } if (nextKeys.isEmpty()) { - throw new IllegalArgumentException("None of nextKeys, digestOfNextKeys, or nextKeyConfigurationDigest provided"); + throw new IllegalArgumentException( + "None of nextKeys, digestOfNextKeys, or nextKeyConfigurationDigest provided"); } var nextKeyConfigurationDigest = KeyConfigurationDigester.digest(nextSigningThreshold, nextKeys, @@ -171,106 +263,40 @@ public 
EventCoordinates getCurrentCoords() { return currentCoords; } - public Digest getCurrentDigest() { - return currentDigest; - } - - public DigestAlgorithm getDigestAlgorithm() { - return digestAlgorithm; - } - - public Identifier getIdentifier() { - return identifier; - } - - public List getKeys() { - return keys; - } - - public List getNextKeys() { - return nextKeys; - } - - public DigestAlgorithm getNextKeysAlgorithm() { - return nextKeysAlgorithm; - } - - public SigningThreshold getNextSigningThreshold() { - return nextSigningThreshold; - } - - public List getSeals() { - return seals; - } - - public SignatureAlgorithm getSignatureAlgorithm() { - return signatureAlgorithm; - } - - public Signer getSigner() { - return signer; - } - - public SigningThreshold getSigningThreshold() { - return signingThreshold; - } - - public Version getVersion() { - return version; - } - - public List getWitnesses() { - return witnesses; - } - - public int getWitnessThreshold() { - return witnessThreshold; - } - - public Builder removeWitness(BasicIdentifier identifier) { - if (!witnesses.remove(requireNonNull(identifier))) { - throw new IllegalArgumentException("witness not found in witness set"); - } - return this; - } - - public Builder removeWitnesses(BasicIdentifier... witnesses) { - for (var witness : witnesses) { - removeWitness(witness); - } - return this; - } - - public Builder removeWitnesses(List witnesses) { - for (var witness : witnesses) { - removeWitness(witness); - } - return this; - } - public Builder setCurrentCoords(EventCoordinates currentCoords) { this.currentCoords = currentCoords; return this; } + public Digest getCurrentDigest() { + return currentDigest; + } + public Builder setCurrentDigest(Digest currentDigest) { this.currentDigest = currentDigest; return this; } + public DigestAlgorithm getDigestAlgorithm() { + return digestAlgorithm; + } + public Builder setDigestAlgorithm(DigestAlgorithm digestAlgorithm) { this.digestAlgorithm = digestAlgorithm; return this; } + public Identifier getIdentifier() { + return identifier; + } + public Builder setIdentifier(Identifier identifier) { this.identifier = identifier; return this; } - public Builder setKey(PublicKey publicKey) { - keys.add(publicKey); - return this; + public List getKeys() { + return keys; } public Builder setKeys(List publicKeys) { @@ -278,12 +304,24 @@ public Builder setKeys(List publicKeys) { return this; } + public List getNextKeys() { + return nextKeys; + } + public Builder setNextKeys(List nextKeys) { this.nextKeys.clear(); this.nextKeys.addAll(nextKeys); return this; } + public DigestAlgorithm getNextKeysAlgorithm() { + return nextKeysAlgorithm; + } + + public SigningThreshold getNextSigningThreshold() { + return nextSigningThreshold; + } + public Builder setNextSigningThreshold(int nextSigningThreshold) { if (nextSigningThreshold < 1) { throw new IllegalArgumentException("nextSigningThreshold must be 1 or greater"); @@ -298,17 +336,33 @@ public Builder setNextSigningThreshold(SigningThreshold nextSigningThreshold) { return this; } + public List getSeals() { + return seals; + } + + public SignatureAlgorithm getSignatureAlgorithm() { + return signatureAlgorithm; + } + public Builder setSignatureAlgorithm(SignatureAlgorithm signatureAlgorithm) { this.signatureAlgorithm = signatureAlgorithm; return this; } + public Signer getSigner() { + return signer; + } + public Builder setSigner(Signer signer) { requireNonNull(signer); this.signer = signer; return this; } + public SigningThreshold getSigningThreshold() { + return 
signingThreshold; + } + public Builder setSigningThreshold(int signingThreshold) { if (signingThreshold < 1) { throw new IllegalArgumentException("signingThreshold must be 1 or greater"); @@ -323,11 +377,19 @@ public Builder setSigningThreshold(SigningThreshold signingThreshold) { return this; } + public Version getVersion() { + return version; + } + public Builder setVersion(Version version) { this.version = version; return this; } + public int getWitnessThreshold() { + return witnessThreshold; + } + public Builder setWitnessThreshold(int witnessThreshold) { if (witnessThreshold < 0) { throw new IllegalArgumentException("witnessThreshold must not be negative"); @@ -337,100 +399,40 @@ public Builder setWitnessThreshold(int witnessThreshold) { return this; } - public RotationSpec toSpec() { - return RotationSpec.newBuilder().build(); + public List getWitnesses() { + return witnesses; } - } - - public static Builder newBuilder() { - return new Builder(); - } - - private final List addedWitnesses; - private final Identifier identifier; - private final List keys; - private final Digest nextKeys; - private final EventCoordinates previous; - private final Digest priorEventDigest; - private final List removedWitnesses; - private final List seals; - private final ULong sequenceNumber; - private final Signer signer; - private final SigningThreshold signingThreshold; - private final Version version; - private final int witnessThreshold; - - public RotationSpecification(Identifier identifier, ULong uLong, EventCoordinates previousEvent, - SigningThreshold signingThreshold, List keys, Signer signer, - Digest nextKeys, int witnessThreshold, List removedWitnesses, - List addedWitnesses, List seals, Version version, - Digest priorEventDigest) { - this.identifier = identifier; - this.sequenceNumber = uLong; - this.previous = previousEvent; - this.signingThreshold = signingThreshold; - this.keys = List.copyOf(keys); - this.signer = signer; - this.nextKeys = nextKeys; - this.witnessThreshold = witnessThreshold; - this.addedWitnesses = List.copyOf(addedWitnesses); - this.removedWitnesses = List.copyOf(removedWitnesses); - this.seals = List.copyOf(seals); - this.version = version; - this.priorEventDigest = priorEventDigest; - } - - public List getAddedWitnesses() { - return addedWitnesses; - } - - public Identifier getIdentifier() { - return identifier; - } - - public List getKeys() { - return keys; - } - - public Digest getNextKeys() { - return nextKeys; - } - - public EventCoordinates getPrevious() { - return previous; - } - - public Digest getPriorEventDigest() { - return priorEventDigest; - } - - public List getRemovedWitnesses() { - return removedWitnesses; - } - - public List getSeals() { - return seals; - } + public Builder removeWitness(BasicIdentifier identifier) { + if (!witnesses.remove(requireNonNull(identifier))) { + throw new IllegalArgumentException("witness not found in witness set"); + } + return this; + } - public ULong getSequenceNumber() { - return sequenceNumber; - } + public Builder removeWitnesses(BasicIdentifier... 
witnesses) { + for (var witness : witnesses) { + removeWitness(witness); + } + return this; + } - public Signer getSigner() { - return signer; - } + public Builder removeWitnesses(List witnesses) { + for (var witness : witnesses) { + removeWitness(witness); + } + return this; + } - public SigningThreshold getSigningThreshold() { - return signingThreshold; - } + public Builder setKey(PublicKey publicKey) { + keys.add(publicKey); + return this; + } - public Version getVersion() { - return version; - } + public RotationSpec toSpec() { + return RotationSpec.newBuilder().build(); + } - public int getWitnessThreshold() { - return witnessThreshold; } } diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/mem/MemKERL.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/mem/MemKERL.java index fc0ff114d6..03784f9057 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/mem/MemKERL.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/mem/MemKERL.java @@ -12,6 +12,7 @@ import com.salesforce.apollo.stereotomy.EventCoordinates; import com.salesforce.apollo.stereotomy.KERL; import com.salesforce.apollo.stereotomy.KeyState; +import com.salesforce.apollo.stereotomy.caching.CachingKERL; import com.salesforce.apollo.stereotomy.event.AttachmentEvent; import com.salesforce.apollo.stereotomy.event.AttachmentEvent.Attachment; import com.salesforce.apollo.stereotomy.event.KeyEvent; @@ -29,7 +30,7 @@ /** * @author hal.hildebrand */ -public class MemKERL implements KERL { +public class MemKERL implements KERL.AppendKERL { private final DigestAlgorithm digestAlgorithm; // Order by @@ -122,6 +123,10 @@ public Void appendValidations(EventCoordinates coordinates, Map f.apply(this)); + } + @Override public Attachment getAttachment(EventCoordinates coordinates) { return receipts.get(coordinateOrdering(coordinates)); @@ -150,17 +155,17 @@ public KeyState getKeyState(Identifier identifier) { return stateHash == null ? null : keyState.get(stateHash); } - @Override - public Map getValidations(EventCoordinates coordinates) { - return validations.computeIfAbsent(coordinates, k -> Collections.emptyMap()); - } - @Override public KeyState getKeyState(Identifier identifier, ULong sequenceNumber) { var location = sequenceNumberToLocation.get(locationOrdering(identifier, sequenceNumber)); return location == null ? null : keyState.get(location); } + @Override + public Map getValidations(EventCoordinates coordinates) { + return validations.computeIfAbsent(coordinates, k -> Collections.emptyMap()); + } + private void append(KeyEvent event, KeyState newState) { String coordinates = coordinateOrdering(event.getCoordinates()); events.put(coordinates, event); diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/Binder.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/Binder.java index d16d4e75f5..145a18dd35 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/Binder.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/Binder.java @@ -14,12 +14,10 @@ import com.salesforce.apollo.stereotomy.identifier.Identifier; /** - * Bindings may be made between non transferable identifiers and any of the - * available Bound value types. Bindings are the signed Bound value by the key - * of the identifier of the binding. - * + * Bindings may be made between non transferable identifiers and any of the available Bound value types. 
Bindings are
+ * the signed Bound value by the key of the identifier of the binding.
+ *
  * @author hal.hildebrand
- *
  */
 public interface Binder {
     interface BinderService {
@@ -28,15 +26,15 @@ interface BinderService {
         void unbind(Identifier identifier) throws TimeoutException;
     }
-    public record Bound(InceptionEvent identifier, String host, int port) {}
+    public record Bound(InceptionEvent identifier, String host, int port) {
+    }
     public record Binding(Bound value, JohnHancock signature) {
-        public static Binding from(com.salesfoce.apollo.stereotomy.event.proto.Binding binding) {
-            var isEmpty = binding.equals(com.salesfoce.apollo.stereotomy.event.proto.Binding.getDefaultInstance());
-            return isEmpty ? null
-                           : new Binding(new Bound(new InceptionEventImpl(binding.getValue().getIdentifier()),
-                                         binding.getValue().getHost(), binding.getValue().getPort()),
-                                         JohnHancock.from(binding.getSignature()));
+        public static Binding from(com.salesforce.apollo.stereotomy.event.proto.Binding binding) {
+            var isEmpty = binding.equals(com.salesforce.apollo.stereotomy.event.proto.Binding.getDefaultInstance());
+            return isEmpty ? null : new Binding(
+            new Bound(new InceptionEventImpl(binding.getValue().getIdentifier()), binding.getValue().getHost(),
+                      binding.getValue().getPort()), JohnHancock.from(binding.getSignature()));
         }
     }
 }
diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoBinder.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoBinder.java
index 1f4ca6180c..dba1e0641b 100644
--- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoBinder.java
+++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoBinder.java
@@ -8,12 +8,11 @@
 import java.util.concurrent.CompletableFuture;
-import com.salesfoce.apollo.stereotomy.event.proto.Binding;
-import com.salesfoce.apollo.stereotomy.event.proto.Ident;
+import com.salesforce.apollo.stereotomy.event.proto.Binding;
+import com.salesforce.apollo.stereotomy.event.proto.Ident;
 /**
  * @author hal.hildebrand
- *
  */
 public interface ProtoBinder {
     CompletableFuture bind(Binding binding);
diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoEventObserver.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoEventObserver.java
index 26735095eb..414e22b105 100644
--- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoEventObserver.java
+++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoEventObserver.java
@@ -9,14 +9,13 @@
 import java.util.List;
 import java.util.concurrent.CompletableFuture;
-import com.salesfoce.apollo.stereotomy.event.proto.AttachmentEvent;
-import com.salesfoce.apollo.stereotomy.event.proto.KERL_;
-import com.salesfoce.apollo.stereotomy.event.proto.KeyEvent_;
-import com.salesfoce.apollo.stereotomy.event.proto.Validations;
+import com.salesforce.apollo.stereotomy.event.proto.AttachmentEvent;
+import com.salesforce.apollo.stereotomy.event.proto.KERL_;
+import com.salesforce.apollo.stereotomy.event.proto.KeyEvent_;
+import com.salesforce.apollo.stereotomy.event.proto.Validations;
 /**
  * @author hal.hildebrand
- *
  */
 public interface ProtoEventObserver {
diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoEventValidation.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoEventValidation.java
index 6133d22aff..8838580f6c 100644
--- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoEventValidation.java
+++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoEventValidation.java
@@ -8,11 +8,10 @@
 import java.util.concurrent.CompletableFuture;
-import com.salesfoce.apollo.stereotomy.event.proto.KeyEvent_;
+import com.salesforce.apollo.stereotomy.event.proto.KeyEvent_;
 /**
  * @author hal.hildebrand
- *
  */
 public interface ProtoEventValidation {
     CompletableFuture validate(KeyEvent_ event);
diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoKERLAdapter.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoKERLAdapter.java
index 2ceda5870d..19411fa155 100644
--- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoKERLAdapter.java
+++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoKERLAdapter.java
@@ -7,7 +7,6 @@
 package com.salesforce.apollo.stereotomy.services.proto;
 import com.google.protobuf.Empty;
-import com.salesfoce.apollo.stereotomy.event.proto.*;
 import com.salesforce.apollo.cryptography.DigestAlgorithm;
 import com.salesforce.apollo.cryptography.JohnHancock;
 import com.salesforce.apollo.stereotomy.EventCoordinates;
@@ -18,6 +17,7 @@
 import com.salesforce.apollo.stereotomy.event.EstablishmentEvent;
 import com.salesforce.apollo.stereotomy.event.KeyEvent;
 import com.salesforce.apollo.stereotomy.event.KeyStateWithEndorsementsAndValidations;
+import com.salesforce.apollo.stereotomy.event.proto.*;
 import com.salesforce.apollo.stereotomy.event.protobuf.AttachmentEventImpl;
 import com.salesforce.apollo.stereotomy.event.protobuf.ProtobufEventFactory;
 import com.salesforce.apollo.stereotomy.identifier.Identifier;
@@ -34,9 +34,9 @@
  */
 public class ProtoKERLAdapter implements ProtoKERLService {
-    private final KERL kerl;
+    private final KERL.AppendKERL kerl;
-    public ProtoKERLAdapter(KERL kerl) {
+    public ProtoKERLAdapter(KERL.AppendKERL kerl) {
         this.kerl = kerl;
     }
diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoKERLProvider.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoKERLProvider.java
index 519ebe8afe..dddb4b0760 100644
--- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoKERLProvider.java
+++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoKERLProvider.java
@@ -6,7 +6,7 @@
  */
 package com.salesforce.apollo.stereotomy.services.proto;
-import com.salesfoce.apollo.stereotomy.event.proto.*;
+import com.salesforce.apollo.stereotomy.event.proto.*;
 /**
  * @author hal.hildebrand
diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoKERLService.java b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoKERLService.java
index 5fc943ce03..98bfb06497 100644
--- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoKERLService.java
+++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoKERLService.java
@@ -7,7 +7,7 @@
 package com.salesforce.apollo.stereotomy.services.proto;
 import com.google.protobuf.Empty;
-import com.salesfoce.apollo.stereotomy.event.proto.*;
+import com.salesforce.apollo.stereotomy.event.proto.*;
 import java.util.List;
diff --git a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoResolver.java
b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoResolver.java index efc7130490..312569972a 100644 --- a/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoResolver.java +++ b/stereotomy/src/main/java/com/salesforce/apollo/stereotomy/services/proto/ProtoResolver.java @@ -8,12 +8,11 @@ import java.util.Optional; -import com.salesfoce.apollo.stereotomy.event.proto.Binding; -import com.salesfoce.apollo.stereotomy.event.proto.Ident; +import com.salesforce.apollo.stereotomy.event.proto.Binding; +import com.salesforce.apollo.stereotomy.event.proto.Ident; /** * @author hal.hildebrand - * */ public interface ProtoResolver { /** diff --git a/stereotomy/src/test/java/com/salesforce/apollo/stereotomy/StereotomyTests.java b/stereotomy/src/test/java/com/salesforce/apollo/stereotomy/StereotomyTests.java index 9ec86f468e..3e3691ffd9 100644 --- a/stereotomy/src/test/java/com/salesforce/apollo/stereotomy/StereotomyTests.java +++ b/stereotomy/src/test/java/com/salesforce/apollo/stereotomy/StereotomyTests.java @@ -13,7 +13,7 @@ import com.salesforce.apollo.cryptography.Verifier; import com.salesforce.apollo.stereotomy.event.EstablishmentEvent; import com.salesforce.apollo.stereotomy.event.KeyEvent; -import com.salesforce.apollo.stereotomy.event.Seal.CoordinatesSeal; +import com.salesforce.apollo.stereotomy.event.Seal; import com.salesforce.apollo.stereotomy.event.Seal.DigestSeal; import com.salesforce.apollo.stereotomy.identifier.BasicIdentifier; import com.salesforce.apollo.stereotomy.identifier.Identifier; @@ -42,14 +42,14 @@ * @author hal.hildebrand */ public class StereotomyTests { - KERL kel; + KERL.AppendKERL kel; StereotomyKeyStore ks; - SecureRandom secureRandom; + SecureRandom secureRandom; @BeforeEach public void before() throws Exception { secureRandom = SecureRandom.getInstance("SHA1PRNG"); - secureRandom.setSeed(new byte[]{0}); + secureRandom.setSeed(new byte[] { 0 }); initializeKel(); // this makes the values of secureRandom deterministic ks = initializeKeyStore(); @@ -63,7 +63,7 @@ public void identifierInteraction() throws Exception { var digest = DigestAlgorithm.BLAKE3_256.digest("digest seal".getBytes()); var event = EventCoordinates.of(kel.getKeyEvent(i.getLastEstablishmentEvent())); - var seals = List.of(DigestSeal.construct(digest), DigestSeal.construct(digest), CoordinatesSeal.construct(event)); + var seals = List.of(DigestSeal.construct(digest), DigestSeal.construct(digest), Seal.construct(event)); i.rotate(); i.seal(InteractionSpecification.newBuilder()); @@ -92,7 +92,9 @@ public void identifierRotate() throws Exception { var digest = DigestAlgorithm.BLAKE3_256.digest("digest seal".getBytes()); var event = EventCoordinates.of(kel.getKeyEvent(i.getLastEstablishmentEvent())); - i.rotate(RotationSpecification.newBuilder().addAllSeals(List.of(DigestSeal.construct(digest), DigestSeal.construct(digest), CoordinatesSeal.construct(event)))); + i.rotate(RotationSpecification.newBuilder() + .addAllSeals(List.of(DigestSeal.construct(digest), DigestSeal.construct(digest), + Seal.construct(event)))); i.rotate(); } @@ -107,7 +109,8 @@ public void newIdentifier() throws Exception { assertTrue(identifier.getIdentifier() instanceof SelfAddressingIdentifier); var sap = (SelfAddressingIdentifier) identifier.getIdentifier(); assertEquals(DigestAlgorithm.DEFAULT, sap.getDigest().getAlgorithm()); - assertEquals("4cb6958622749694aedff3d48b8e402524562813bf2bdd11894a528edc965b4d", Hex.hex(sap.getDigest().getBytes())); + 
assertEquals("4cb6958622749694aedff3d48b8e402524562813bf2bdd11894a528edc965b4d", + Hex.hex(sap.getDigest().getBytes())); assertEquals(1, ((Unweighted) identifier.getSigningThreshold()).getThreshold()); @@ -115,7 +118,8 @@ public void newIdentifier() throws Exception { assertEquals(1, identifier.getKeys().size()); assertNotNull(identifier.getKeys().get(0)); - EstablishmentEvent lastEstablishmentEvent = (EstablishmentEvent) kel.getKeyEvent(identifier.getLastEstablishmentEvent()); + EstablishmentEvent lastEstablishmentEvent = (EstablishmentEvent) kel.getKeyEvent( + identifier.getLastEstablishmentEvent()); assertEquals(identifier.getKeys().get(0), lastEstablishmentEvent.getKeys().get(0)); var keyCoordinates = KeyCoordinates.of(lastEstablishmentEvent, 0); @@ -127,7 +131,11 @@ public void newIdentifier() throws Exception { assertTrue(identifier.getNextKeyConfigurationDigest().isPresent()); var keyStoreNextKeyPair = ks.getNextKey(keyCoordinates); assertTrue(keyStoreNextKeyPair.isPresent()); - var expectedNextKeys = KeyConfigurationDigester.digest(SigningThreshold.unweighted(1), List.of(keyStoreNextKeyPair.get().getPublic()), identifier.getNextKeyConfigurationDigest().get().getAlgorithm()); + var expectedNextKeys = KeyConfigurationDigester.digest(SigningThreshold.unweighted(1), + List.of(keyStoreNextKeyPair.get().getPublic()), + identifier.getNextKeyConfigurationDigest() + .get() + .getAlgorithm()); assertEquals(expectedNextKeys, identifier.getNextKeyConfigurationDigest().get()); // witnesses @@ -155,13 +163,15 @@ public void newIdentifierFromIdentifier() throws Exception { Stereotomy controller = new StereotomyImpl(ks, kel, secureRandom); ControlledIdentifier base = controller.newIdentifier(); - ControlledIdentifier identifier = base.newIdentifier(IdentifierSpecification.newBuilder()); + ControlledIdentifier identifier = base.newIdentifier( + IdentifierSpecification.newBuilder()); // identifier assertTrue(identifier.getIdentifier() instanceof SelfAddressingIdentifier); var sap = (SelfAddressingIdentifier) identifier.getIdentifier(); assertEquals(DigestAlgorithm.DEFAULT, sap.getDigest().getAlgorithm()); - assertEquals("092126af01f80ca28e7a99bbdce229c029be3bbfcb791e29ccb7a64e8019a36f", Hex.hex(sap.getDigest().getBytes())); + assertEquals("092126af01f80ca28e7a99bbdce229c029be3bbfcb791e29ccb7a64e8019a36f", + Hex.hex(sap.getDigest().getBytes())); assertEquals(1, ((Unweighted) identifier.getSigningThreshold()).getThreshold()); @@ -169,7 +179,8 @@ public void newIdentifierFromIdentifier() throws Exception { assertEquals(1, identifier.getKeys().size()); assertNotNull(identifier.getKeys().get(0)); - EstablishmentEvent lastEstablishmentEvent = (EstablishmentEvent) kel.getKeyEvent(identifier.getLastEstablishmentEvent()); + EstablishmentEvent lastEstablishmentEvent = (EstablishmentEvent) kel.getKeyEvent( + identifier.getLastEstablishmentEvent()); assertEquals(identifier.getKeys().get(0), lastEstablishmentEvent.getKeys().get(0)); var keyCoordinates = KeyCoordinates.of(lastEstablishmentEvent, 0); @@ -181,9 +192,11 @@ public void newIdentifierFromIdentifier() throws Exception { assertTrue(identifier.getNextKeyConfigurationDigest().isPresent()); var keyStoreNextKeyPair = ks.getNextKey(keyCoordinates); assertTrue(keyStoreNextKeyPair.isPresent()); - var expectedNextKeys = KeyConfigurationDigester.digest(SigningThreshold.unweighted(1), List.of(keyStoreNextKeyPair.get().getPublic()), identifier.getNextKeyConfigurationDigest() - .get() - .getAlgorithm()); + var expectedNextKeys = 
KeyConfigurationDigester.digest(SigningThreshold.unweighted(1), + List.of(keyStoreNextKeyPair.get().getPublic()), + identifier.getNextKeyConfigurationDigest() + .get() + .getAlgorithm()); assertEquals(expectedNextKeys, identifier.getNextKeyConfigurationDigest().get()); // witnesses @@ -208,7 +221,7 @@ public void newIdentifierFromIdentifier() throws Exception { var digest = DigestAlgorithm.BLAKE3_256.digest("digest seal".getBytes()); var event = EventCoordinates.of(kel.getKeyEvent(identifier.getLastEstablishmentEvent())); - var seals = List.of(DigestSeal.construct(digest), DigestSeal.construct(digest), CoordinatesSeal.construct(event)); + var seals = List.of(DigestSeal.construct(digest), DigestSeal.construct(digest), Seal.construct(event)); identifier.rotate(); identifier.seal(InteractionSpecification.newBuilder()); @@ -225,14 +238,14 @@ public void provision() throws Exception { provision(i, controller); } - protected StereotomyKeyStore initializeKeyStore() { - return new MemKeyStore(); - } - void initializeKel() throws Exception { kel = new MemKERL(DigestAlgorithm.DEFAULT); } + protected StereotomyKeyStore initializeKeyStore() { + return new MemKeyStore(); + } + private void provision(ControlledIdentifier identifier, Stereotomy controller) throws Exception { var now = Instant.now(); var cwpk = identifier.provision(now, Duration.ofSeconds(100), SignatureAlgorithm.DEFAULT); @@ -255,7 +268,8 @@ private void provision(ControlledIdentifier identifier, Stereotomy controller var verifiers = new Verifiers() { @Override public Optional verifierFor(EventCoordinates coordinates) { - return (identifier.getIdentifier().equals(coordinates.getIdentifier())) ? identifier.getVerifier() : Optional.empty(); + return (identifier.getIdentifier().equals(coordinates.getIdentifier())) ? 
identifier.getVerifier() + : Optional.empty(); } @Override diff --git a/thoth/src/main/java/com/salesforce/apollo/thoth/Ani.java b/thoth/src/main/java/com/salesforce/apollo/thoth/Ani.java index d4655e5c2c..271d32124a 100644 --- a/thoth/src/main/java/com/salesforce/apollo/thoth/Ani.java +++ b/thoth/src/main/java/com/salesforce/apollo/thoth/Ani.java @@ -7,8 +7,10 @@ package com.salesforce.apollo.thoth; -import com.salesforce.apollo.cryptography.*; -import com.salesforce.apollo.cryptography.Verifier.Filtered; +import com.salesforce.apollo.cryptography.Digest; +import com.salesforce.apollo.cryptography.JohnHancock; +import com.salesforce.apollo.cryptography.SignatureAlgorithm; +import com.salesforce.apollo.cryptography.Verifier; import com.salesforce.apollo.cryptography.ssl.CertificateValidator; import com.salesforce.apollo.stereotomy.*; import com.salesforce.apollo.stereotomy.KEL.KeyStateWithAttachments; @@ -16,10 +18,10 @@ import com.salesforce.apollo.stereotomy.event.KeyEvent; import com.salesforce.apollo.stereotomy.identifier.Identifier; import com.salesforce.apollo.utils.BbBackedInputStream; +import org.joou.ULong; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.InputStream; import java.security.PublicKey; import java.time.Duration; import java.util.HashMap; @@ -34,11 +36,11 @@ public class Ani { private static final Logger log = LoggerFactory.getLogger(Ani.class); - private final Digest id; + private final Digest member; private final KERL kerl; - public Ani(Digest id, KERL kerl) { - this.id = id; + public Ani(Digest member, KERL kerl) { + this.member = member; this.kerl = kerl; } @@ -48,45 +50,24 @@ public CertificateValidator certificateValidator(Duration timeout) { public EventValidation eventValidation(Duration timeout) { return new EventValidation() { - @Override - public Filtered filtered(EventCoordinates coordinates, SigningThreshold threshold, JohnHancock signature, - InputStream message) { - - KeyState ks = kerl.getKeyState(coordinates); - var v = new Verifier.DefaultVerifier(ks.getKeys()); - return v.filtered(threshold, signature, message); - } @Override - public Optional getKeyState(EventCoordinates coordinates) { - return Optional.of(kerl.getKeyState(coordinates)); + public KeyState keyState(Identifier id, ULong sequenceNumber) { + return kerl.getKeyState(id, sequenceNumber); } @Override public boolean validate(EstablishmentEvent event) { + log.trace("Validate event: {} on: {}", event, member); return Ani.this.validateKerl(event, timeout); } @Override public boolean validate(EventCoordinates coordinates) { + log.trace("Validating coordinates: {} on: {}", coordinates, member); KeyEvent ke = kerl.getKeyEvent(coordinates); return Ani.this.validateKerl(ke, timeout); } - - @Override - public boolean verify(EventCoordinates coordinates, JohnHancock signature, InputStream message) { - KeyState ks = kerl.getKeyState(coordinates); - var v = new Verifier.DefaultVerifier(ks.getKeys()); - return v.verify(signature, message); - } - - @Override - public boolean verify(EventCoordinates coordinates, SigningThreshold threshold, JohnHancock signature, - InputStream message) { - KeyState ks = kerl.getKeyState(coordinates); - var v = new Verifier.DefaultVerifier(ks.getKeys()); - return v.verify(threshold, signature, message); - } }; } @@ -134,6 +115,7 @@ private boolean kerlValidate(Duration timeout, KeyStateWithAttachments ksa, KeyE event.toKeyEvent_() .toByteString())); } + log.trace("Kerl validation: {} for: {} on: {}", witnessed, ksa.state().getCoordinates(), 
member); return witnessed; } @@ -146,5 +128,4 @@ private boolean performKerlValidation(EventCoordinates coord, Duration timeout) private boolean validateKerl(KeyEvent event, Duration timeout) { return performKerlValidation(event.getCoordinates(), timeout); } - } diff --git a/thoth/src/main/java/com/salesforce/apollo/thoth/CombinedIntervals.java b/thoth/src/main/java/com/salesforce/apollo/thoth/CombinedIntervals.java index a0dfb448dc..4155457f74 100644 --- a/thoth/src/main/java/com/salesforce/apollo/thoth/CombinedIntervals.java +++ b/thoth/src/main/java/com/salesforce/apollo/thoth/CombinedIntervals.java @@ -7,21 +7,16 @@ package com.salesforce.apollo.thoth; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Comparator; -import java.util.List; +import com.salesforce.apollo.thoth.proto.Interval; +import com.salesforce.apollo.cryptography.Digest; + +import java.util.*; import java.util.function.Predicate; import java.util.stream.Collectors; import java.util.stream.Stream; -import com.salesfoce.apollo.thoth.proto.Interval; -import com.salesforce.apollo.cryptography.Digest; - /** * @author hal.hildebrand - * */ public class CombinedIntervals implements Predicate { private final List intervals = new ArrayList<>(); @@ -40,8 +35,8 @@ public int compare(KeyInterval o1, KeyInterval o2) { int comparison = o1.getBegin().compareTo(o2.getBegin()); return comparison == 0 // if both intervals begin the same - ? o1.getEnd().compareTo(o2.getEnd()) // compare their ends - : comparison; + ? o1.getEnd().compareTo(o2.getEnd()) // compare their ends + : comparison; } }); KeyInterval current = allIntervals.get(0); diff --git a/thoth/src/main/java/com/salesforce/apollo/thoth/DirectPublisher.java b/thoth/src/main/java/com/salesforce/apollo/thoth/DirectPublisher.java index 35b574d439..b99b5cc93b 100644 --- a/thoth/src/main/java/com/salesforce/apollo/thoth/DirectPublisher.java +++ b/thoth/src/main/java/com/salesforce/apollo/thoth/DirectPublisher.java @@ -6,10 +6,10 @@ */ package com.salesforce.apollo.thoth; -import com.salesfoce.apollo.stereotomy.event.proto.AttachmentEvent; -import com.salesfoce.apollo.stereotomy.event.proto.KERL_; -import com.salesfoce.apollo.stereotomy.event.proto.KeyEvent_; -import com.salesfoce.apollo.stereotomy.event.proto.Validations; +import com.salesforce.apollo.stereotomy.event.proto.AttachmentEvent; +import com.salesforce.apollo.stereotomy.event.proto.KERL_; +import com.salesforce.apollo.stereotomy.event.proto.KeyEvent_; +import com.salesforce.apollo.stereotomy.event.proto.Validations; import com.salesforce.apollo.stereotomy.services.proto.ProtoEventObserver; import com.salesforce.apollo.stereotomy.services.proto.ProtoKERLAdapter; import org.slf4j.Logger; diff --git a/thoth/src/main/java/com/salesforce/apollo/thoth/KerlDHT.java b/thoth/src/main/java/com/salesforce/apollo/thoth/KerlDHT.java index 6091ff44f7..45521e8e42 100644 --- a/thoth/src/main/java/com/salesforce/apollo/thoth/KerlDHT.java +++ b/thoth/src/main/java/com/salesforce/apollo/thoth/KerlDHT.java @@ -12,32 +12,27 @@ import com.google.common.collect.Multiset.Entry; import com.google.common.collect.Ordering; import com.google.protobuf.Empty; -import com.salesfoce.apollo.stereotomy.event.proto.*; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.KeyStates; -import com.salesfoce.apollo.thoth.proto.Intervals; -import com.salesfoce.apollo.thoth.proto.Update; -import com.salesfoce.apollo.thoth.proto.Updating; import com.salesforce.apollo.archipelago.Router; import 
com.salesforce.apollo.archipelago.RouterImpl.CommonCommunications; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; +import com.salesforce.apollo.cryptography.Verifier; import com.salesforce.apollo.membership.Context; import com.salesforce.apollo.membership.Member; import com.salesforce.apollo.membership.Ring; import com.salesforce.apollo.membership.SigningMember; import com.salesforce.apollo.ring.RingCommunications; import com.salesforce.apollo.ring.RingIterator; -import com.salesforce.apollo.stereotomy.DelegatedKERL; -import com.salesforce.apollo.stereotomy.EventCoordinates; -import com.salesforce.apollo.stereotomy.KERL; -import com.salesforce.apollo.stereotomy.KeyState; +import com.salesforce.apollo.stereotomy.*; import com.salesforce.apollo.stereotomy.caching.CachingKERL; import com.salesforce.apollo.stereotomy.db.UniKERLDirectPooled; import com.salesforce.apollo.stereotomy.db.UniKERLDirectPooled.ClosableKERL; import com.salesforce.apollo.stereotomy.event.KeyEvent; +import com.salesforce.apollo.stereotomy.event.proto.*; import com.salesforce.apollo.stereotomy.identifier.Identifier; import com.salesforce.apollo.stereotomy.services.grpc.StereotomyMetrics; import com.salesforce.apollo.stereotomy.services.grpc.kerl.KERLAdapter; +import com.salesforce.apollo.stereotomy.services.grpc.proto.KeyStates; import com.salesforce.apollo.stereotomy.services.proto.ProtoKERLAdapter; import com.salesforce.apollo.stereotomy.services.proto.ProtoKERLService; import com.salesforce.apollo.thoth.LoggingOutputStream.LogLevel; @@ -48,7 +43,11 @@ import com.salesforce.apollo.thoth.grpc.reconciliation.ReconciliationClient; import com.salesforce.apollo.thoth.grpc.reconciliation.ReconciliationServer; import com.salesforce.apollo.thoth.grpc.reconciliation.ReconciliationService; +import com.salesforce.apollo.thoth.proto.Intervals; +import com.salesforce.apollo.thoth.proto.Update; +import com.salesforce.apollo.thoth.proto.Updating; import com.salesforce.apollo.utils.Entropy; +import com.salesforce.apollo.utils.Utils; import liquibase.Liquibase; import liquibase.Scope; import liquibase.Scope.Attr; @@ -90,13 +89,15 @@ public class KerlDHT implements ProtoKERLService { private final static Logger log = LoggerFactory.getLogger( KerlDHT.class); + private final static Logger reconcileLog = LoggerFactory.getLogger( + KerlSpace.class); private final Ani ani; private final CachingKERL cache; private final JdbcConnectionPool connectionPool; private final Context context; private final CommonCommunications dhtComms; private final double fpr; - private final Duration frequency; + private final Duration operationsFrequency; private final CachingKERL kerl; private final UniKERLDirectPooled kerlPool; private final KerlSpace kerlSpace; @@ -107,19 +108,19 @@ public class KerlDHT implements ProtoKERLService { private final ScheduledExecutorService scheduler; private final Service service = new Service(); private final AtomicBoolean started = new AtomicBoolean(); - private final TemporalAmount timeout; + private final TemporalAmount operationTimeout; - public KerlDHT(Duration frequency, Context context, SigningMember member, - BiFunction wrap, JdbcConnectionPool connectionPool, - DigestAlgorithm digestAlgorithm, Router communications, TemporalAmount timeout, + public KerlDHT(Duration operationsFrequency, Context context, SigningMember member, + BiFunction wrap, JdbcConnectionPool connectionPool, + DigestAlgorithm digestAlgorithm, Router communications, TemporalAmount 
operationTimeout, double falsePositiveRate, StereotomyMetrics metrics) { @SuppressWarnings("unchecked") final var casting = (Context) context; this.context = casting; this.member = member; - this.timeout = timeout; + this.operationTimeout = operationTimeout; this.fpr = falsePositiveRate; - this.frequency = frequency; + this.operationsFrequency = operationsFrequency; this.scheduler = Executors.newScheduledThreadPool(1, Thread.ofVirtual().factory()); this.cache = new CachingKERL(f -> { try { @@ -156,11 +157,11 @@ public KerlDHT(Duration frequency, Context context, SigningMem this.ani = new Ani(member.getId(), asKERL()); } - public KerlDHT(Duration frequency, Context context, SigningMember member, + public KerlDHT(Duration operationsFrequency, Context context, SigningMember member, JdbcConnectionPool connectionPool, DigestAlgorithm digestAlgorithm, Router communications, - TemporalAmount timeout, double falsePositiveRate, StereotomyMetrics metrics) { - this(frequency, context, member, (t, k) -> k, connectionPool, digestAlgorithm, communications, timeout, - falsePositiveRate, metrics); + TemporalAmount operationTimeout, double falsePositiveRate, StereotomyMetrics metrics) { + this(operationsFrequency, context, member, (t, k) -> k, connectionPool, digestAlgorithm, communications, + operationTimeout, falsePositiveRate, metrics); } public static void updateLocationHash(Identifier identifier, DigestAlgorithm digestAlgorithm, DSLContext dsl) { @@ -186,13 +187,6 @@ static T completeIt(T result) { return result; } - /** - * Clear the caches of the receiver - */ - public void clearCache() { - cache.clear(); - } - public KeyState_ append(AttachmentEvent event) { if (event == null) { return null; @@ -202,21 +196,23 @@ public KeyState_ append(AttachmentEvent event) { if (identifier == null) { return null; } - Instant timedOut = Instant.now().plus(timeout); + Instant timedOut = Instant.now().plus(operationTimeout); Supplier isTimedOut = () -> Instant.now().isAfter(timedOut); var result = new CompletableFuture(); HashMultiset gathered = HashMultiset.create(); - new RingIterator<>(frequency, context, member, scheduler, dhtComms).noDuplicates() - .iterate(identifier, null, - (link, r) -> link.append( - Collections.emptyList(), - Collections.singletonList(event)), - null, - (tally, futureSailor, destination) -> mutate( - gathered, futureSailor, identifier, - isTimedOut, tally, destination, - "append events"), - t -> completeIt(result, gathered)); + new RingIterator<>(operationsFrequency, context, member, scheduler, dhtComms).noDuplicates() + .iterate(identifier, null, + (link, r) -> link.append( + Collections.emptyList(), + Collections.singletonList( + event)), null, + (tally, futureSailor, destination) -> mutate( + gathered, futureSailor, + identifier, isTimedOut, + tally, destination, + "append events"), + t -> completeIt(result, + gathered)); try { List s = result.get().getKeyStatesList(); return s.isEmpty() ? 
null : s.getFirst(); @@ -238,19 +234,21 @@ public List append(KERL_ kerl) { if (identifier == null) { return completeIt(Collections.emptyList()); } - Instant timedOut = Instant.now().plus(timeout); + Instant timedOut = Instant.now().plus(operationTimeout); Supplier isTimedOut = () -> Instant.now().isAfter(timedOut); var result = new CompletableFuture(); HashMultiset gathered = HashMultiset.create(); - new RingIterator<>(frequency, context, member, scheduler, dhtComms).noDuplicates() - .iterate(identifier, null, - (link, r) -> link.append(kerl), - null, - (tally, futureSailor, destination) -> mutate( - gathered, futureSailor, identifier, - isTimedOut, tally, destination, - "append kerl"), - t -> completeIt(result, gathered)); + new RingIterator<>(operationsFrequency, context, member, scheduler, dhtComms).noDuplicates() + .iterate(identifier, null, + (link, r) -> link.append( + kerl), null, + (tally, futureSailor, destination) -> mutate( + gathered, futureSailor, + identifier, isTimedOut, + tally, destination, + "append kerl"), + t -> completeIt(result, + gathered)); try { return result.get().getKeyStatesList(); } catch (InterruptedException e) { @@ -266,20 +264,22 @@ public KeyState_ append(KeyEvent_ event) { if (identifier == null) { return null; } - Instant timedOut = Instant.now().plus(timeout); + Instant timedOut = Instant.now().plus(operationTimeout); Supplier isTimedOut = () -> Instant.now().isAfter(timedOut); var result = new CompletableFuture(); HashMultiset gathered = HashMultiset.create(); - new RingIterator<>(frequency, context, member, scheduler, dhtComms).noDuplicates() - .iterate(identifier, null, - (link, r) -> link.append( - Collections.singletonList(event)), - null, - (tally, futureSailor, destination) -> mutate( - gathered, futureSailor, identifier, - isTimedOut, tally, destination, - "append events"), - t -> completeIt(result, gathered)); + new RingIterator<>(operationsFrequency, context, member, scheduler, dhtComms).noDuplicates() + .iterate(identifier, null, + (link, r) -> link.append( + Collections.singletonList( + event)), null, + (tally, futureSailor, destination) -> mutate( + gathered, futureSailor, + identifier, isTimedOut, + tally, destination, + "append events"), + t -> completeIt(result, + gathered)); try { var ks = result.get(); return ks.getKeyStatesCount() == 0 ? 
KeyState_.getDefaultInstance() : ks.getKeyStatesList().get(0); @@ -327,19 +327,21 @@ public Empty appendAttachments(List events) { if (identifier == null) { return completeIt(Empty.getDefaultInstance()); } - Instant timedOut = Instant.now().plus(timeout); + Instant timedOut = Instant.now().plus(operationTimeout); Supplier isTimedOut = () -> Instant.now().isAfter(timedOut); var result = new CompletableFuture(); HashMultiset gathered = HashMultiset.create(); - new RingIterator<>(frequency, context, member, scheduler, dhtComms).noDuplicates() - .iterate(identifier, null, - (link, r) -> link.appendAttachments( - events), null, - (tally, futureSailor, destination) -> mutate( - gathered, futureSailor, identifier, - isTimedOut, tally, destination, - "append attachments"), - t -> completeIt(result, gathered)); + new RingIterator<>(operationsFrequency, context, member, scheduler, dhtComms).noDuplicates() + .iterate(identifier, null, + (link, r) -> link.appendAttachments( + events), null, + (tally, futureSailor, destination) -> mutate( + gathered, futureSailor, + identifier, isTimedOut, + tally, destination, + "append attachments"), + t -> completeIt(result, + gathered)); return Empty.getDefaultInstance(); } @@ -352,19 +354,21 @@ public Empty appendValidations(Validations validations) { if (identifier == null) { return completeIt(null); } - Instant timedOut = Instant.now().plus(timeout); + Instant timedOut = Instant.now().plus(operationTimeout); Supplier isTimedOut = () -> Instant.now().isAfter(timedOut); var result = new CompletableFuture(); HashMultiset gathered = HashMultiset.create(); - new RingIterator<>(frequency, context, member, scheduler, dhtComms).noDuplicates() - .iterate(identifier, null, - (link, r) -> link.appendValidations( - validations), null, - (tally, futureSailor, destination) -> mutate( - gathered, futureSailor, identifier, - isTimedOut, tally, destination, - "append validations"), - t -> completeIt(result, gathered)); + new RingIterator<>(operationsFrequency, context, member, scheduler, dhtComms).noDuplicates() + .iterate(identifier, null, + (link, r) -> link.appendValidations( + validations), null, + (tally, futureSailor, destination) -> mutate( + gathered, futureSailor, + identifier, isTimedOut, + tally, destination, + "append validations"), + t -> completeIt(result, + gathered)); try { return result.get(); } catch (InterruptedException e) { @@ -375,10 +379,17 @@ public Empty appendValidations(Validations validations) { } } - public KERL asKERL() { + public KERL.AppendKERL asKERL() { return cache; } + /** + * Clear the caches of the receiver + */ + public void clearCache() { + cache.clear(); + } + public DigestAlgorithm digestAlgorithm() { return kerlPool.getDigestAlgorithm(); } @@ -396,26 +407,26 @@ public Attachment getAttachment(EventCoords coordinates) { if (identifier == null) { return completeIt(Attachment.getDefaultInstance()); } - Instant timedOut = Instant.now().plus(timeout); + Instant timedOut = Instant.now().plus(operationTimeout); Supplier isTimedOut = () -> Instant.now().isAfter(timedOut); var result = new CompletableFuture(); HashMultiset gathered = HashMultiset.create(); - new RingIterator<>(frequency, context, member, scheduler, dhtComms).noDuplicates() - .iterate(identifier, null, - (link, r) -> link.getAttachment( - coordinates), - () -> failedMajority(result, - maxCount( - gathered)), - (tally, futureSailor, destination) -> read( - result, gathered, tally, - futureSailor, identifier, - isTimedOut, destination, - "get attachment", - 
Attachment.getDefaultInstance()), - t -> failedMajority(result, - maxCount( - gathered))); + new RingIterator<>(operationsFrequency, context, member, scheduler, dhtComms).noDuplicates() + .iterate(identifier, null, + (link, r) -> link.getAttachment( + coordinates), + () -> failedMajority( + result, + maxCount(gathered)), + (tally, futureSailor, destination) -> read( + result, gathered, tally, + futureSailor, identifier, + isTimedOut, destination, + "get attachment", + Attachment.getDefaultInstance()), + t -> failedMajority( + result, + maxCount(gathered))); try { return result.get(); } catch (InterruptedException e) { @@ -435,25 +446,26 @@ public KERL_ getKERL(Ident identifier) { if (digest == null) { return completeIt(KERL_.getDefaultInstance()); } - Instant timedOut = Instant.now().plus(timeout); + Instant timedOut = Instant.now().plus(operationTimeout); Supplier isTimedOut = () -> Instant.now().isAfter(timedOut); var result = new CompletableFuture(); HashMultiset gathered = HashMultiset.create(); - new RingIterator<>(frequency, context, member, scheduler, dhtComms).noDuplicates() - .iterate(digest, null, - (link, r) -> link.getKERL( - identifier), - () -> failedMajority(result, - maxCount( - gathered)), - (tally, futureSailor, destination) -> read( - result, gathered, tally, - futureSailor, digest, isTimedOut, - destination, "get kerl", - KERL_.getDefaultInstance()), - t -> failedMajority(result, - maxCount( - gathered))); + new RingIterator<>(operationsFrequency, context, member, scheduler, dhtComms).noDuplicates() + .iterate(digest, null, + (link, r) -> link.getKERL( + identifier), + () -> failedMajority( + result, + maxCount(gathered)), + (tally, futureSailor, destination) -> read( + result, gathered, tally, + futureSailor, digest, + isTimedOut, destination, + "get kerl", + KERL_.getDefaultInstance()), + t -> failedMajority( + result, + maxCount(gathered))); try { return result.get(); } catch (InterruptedException e) { @@ -474,25 +486,26 @@ public KeyEvent_ getKeyEvent(EventCoords coordinates) { if (digest == null) { return completeIt(KeyEvent_.getDefaultInstance()); } - Instant timedOut = Instant.now().plus(timeout); + Instant timedOut = Instant.now().plus(operationTimeout); Supplier isTimedOut = () -> Instant.now().isAfter(timedOut); var result = new CompletableFuture(); HashMultiset gathered = HashMultiset.create(); - new RingIterator<>(frequency, context, member, scheduler, dhtComms).noDuplicates() - .iterate(digest, null, - (link, r) -> link.getKeyEvent( - coordinates), - () -> failedMajority(result, - maxCount( - gathered)), - (tally, futureSailor, destination) -> read( - result, gathered, tally, - futureSailor, digest, isTimedOut, - destination, "get key event", - KeyEvent_.getDefaultInstance()), - t -> failedMajority(result, - maxCount( - gathered))); + new RingIterator<>(operationsFrequency, context, member, scheduler, dhtComms).noDuplicates() + .iterate(digest, null, + (link, r) -> link.getKeyEvent( + coordinates), + () -> failedMajority( + result, + maxCount(gathered)), + (tally, futureSailor, destination) -> read( + result, gathered, tally, + futureSailor, digest, + isTimedOut, destination, + "get key event", + KeyEvent_.getDefaultInstance()), + t -> failedMajority( + result, + maxCount(gathered))); try { return result.get(); } catch (InterruptedException e) { @@ -513,26 +526,26 @@ public KeyState_ getKeyState(EventCoords coordinates) { if (digest == null) { return completeIt(KeyState_.getDefaultInstance()); } - Instant timedOut = Instant.now().plus(timeout); + Instant 
timedOut = Instant.now().plus(operationTimeout); Supplier isTimedOut = () -> Instant.now().isAfter(timedOut); var result = new CompletableFuture(); HashMultiset gathered = HashMultiset.create(); - new RingIterator<>(frequency, context, member, scheduler, dhtComms).noDuplicates() - .iterate(digest, null, - (link, r) -> link.getKeyState( - coordinates), - () -> failedMajority(result, - maxCount( - gathered)), - (tally, futureSailor, destination) -> read( - result, gathered, tally, - futureSailor, digest, isTimedOut, - destination, - "get key state for coordinates", - KeyState_.getDefaultInstance()), - t -> failedMajority(result, - maxCount( - gathered))); + new RingIterator<>(operationsFrequency, context, member, scheduler, dhtComms).noDuplicates() + .iterate(digest, null, + (link, r) -> link.getKeyState( + coordinates), + () -> failedMajority( + result, + maxCount(gathered)), + (tally, futureSailor, destination) -> read( + result, gathered, tally, + futureSailor, digest, + isTimedOut, destination, + "get key state for coordinates", + KeyState_.getDefaultInstance()), + t -> failedMajority( + result, + maxCount(gathered))); try { return result.get(); } catch (InterruptedException e) { @@ -557,26 +570,26 @@ public KeyState_ getKeyState(Ident identifier, long sequenceNumber) { return completeIt(KeyState_.getDefaultInstance()); } var identAndSeq = IdentAndSeq.newBuilder().setIdentifier(identifier).setSequenceNumber(sequenceNumber).build(); - Instant timedOut = Instant.now().plus(timeout); + Instant timedOut = Instant.now().plus(operationTimeout); Supplier isTimedOut = () -> Instant.now().isAfter(timedOut); var result = new CompletableFuture(); HashMultiset gathered = HashMultiset.create(); - new RingIterator<>(frequency, context, member, scheduler, dhtComms).noDuplicates() - .iterate(digest, null, - (link, r) -> link.getKeyState( - identAndSeq), - () -> failedMajority(result, - maxCount( - gathered)), - (tally, futureSailor, destination) -> read( - result, gathered, tally, - futureSailor, digest, isTimedOut, - destination, - "get key state for coordinates", - KeyState_.getDefaultInstance()), - t -> failedMajority(result, - maxCount( - gathered))); + new RingIterator<>(operationsFrequency, context, member, scheduler, dhtComms).noDuplicates() + .iterate(digest, null, + (link, r) -> link.getKeyState( + identAndSeq), + () -> failedMajority( + result, + maxCount(gathered)), + (tally, futureSailor, destination) -> read( + result, gathered, tally, + futureSailor, digest, + isTimedOut, destination, + "get key state for coordinates", + KeyState_.getDefaultInstance()), + t -> failedMajority( + result, + maxCount(gathered))); try { return result.get(); } catch (InterruptedException e) { @@ -597,25 +610,25 @@ public KeyState_ getKeyState(Ident identifier) { if (digest == null) { return completeIt(KeyState_.getDefaultInstance()); } - Instant timedOut = Instant.now().plus(timeout); + Instant timedOut = Instant.now().plus(operationTimeout); Supplier isTimedOut = () -> Instant.now().isAfter(timedOut); var result = new CompletableFuture(); HashMultiset gathered = HashMultiset.create(); - new RingIterator<>(frequency, context, member, scheduler, dhtComms).iterate(digest, null, - (link, r) -> link.getKeyState( - identifier), - () -> failedMajority(result, - maxCount( - gathered)), - (tally, futureSailor, destination) -> read( - result, gathered, tally, - futureSailor, digest, isTimedOut, - destination, - "get current key state", - KeyState_.getDefaultInstance()), - t -> failedMajority(result, - maxCount( - 
gathered))); + new RingIterator<>(operationsFrequency, context, member, scheduler, dhtComms).iterate(digest, null, + (link, r) -> link.getKeyState( + identifier), + () -> failedMajority( + result, + maxCount(gathered)), + (tally, futureSailor, destination) -> read( + result, gathered, tally, + futureSailor, digest, + isTimedOut, destination, + "get current key state", + KeyState_.getDefaultInstance()), + t -> failedMajority( + result, + maxCount(gathered))); try { return result.get(); } catch (InterruptedException e) { @@ -636,25 +649,25 @@ public KeyStateWithAttachments_ getKeyStateWithAttachments(EventCoords coordinat if (digest == null) { return completeIt(KeyStateWithAttachments_.getDefaultInstance()); } - Instant timedOut = Instant.now().plus(timeout); + Instant timedOut = Instant.now().plus(operationTimeout); Supplier isTimedOut = () -> Instant.now().isAfter(timedOut); var result = new CompletableFuture(); HashMultiset gathered = HashMultiset.create(); - new RingIterator<>(frequency, context, member, scheduler, dhtComms).iterate(digest, null, - (link, r) -> link.getKeyStateWithAttachments( - coordinates), - () -> failedMajority(result, - maxCount( - gathered)), - (tally, futureSailor, destination) -> read( - result, gathered, tally, - futureSailor, digest, isTimedOut, - destination, - "get key state with attachments", - KeyStateWithAttachments_.getDefaultInstance()), - t -> failedMajority(result, - maxCount( - gathered))); + new RingIterator<>(operationsFrequency, context, member, scheduler, dhtComms).iterate(digest, null, + (link, r) -> link.getKeyStateWithAttachments( + coordinates), + () -> failedMajority( + result, + maxCount(gathered)), + (tally, futureSailor, destination) -> read( + result, gathered, tally, + futureSailor, digest, + isTimedOut, destination, + "get key state with attachments", + KeyStateWithAttachments_.getDefaultInstance()), + t -> failedMajority( + result, + maxCount(gathered))); try { return result.get(); } catch (InterruptedException e) { @@ -676,25 +689,25 @@ public KeyStateWithEndorsementsAndValidations_ getKeyStateWithEndorsementsAndVal if (digest == null) { return completeIt(KeyStateWithEndorsementsAndValidations_.getDefaultInstance()); } - Instant timedOut = Instant.now().plus(timeout); + Instant timedOut = Instant.now().plus(operationTimeout); Supplier isTimedOut = () -> Instant.now().isAfter(timedOut); var result = new CompletableFuture(); HashMultiset gathered = HashMultiset.create(); - new RingIterator<>(frequency, context, member, scheduler, dhtComms).iterate(digest, null, - (link, r) -> link.getKeyStateWithEndorsementsAndValidations( - coordinates), - () -> failedMajority(result, - maxCount( - gathered)), - (tally, futureSailor, destination) -> read( - result, gathered, tally, - futureSailor, digest, isTimedOut, - destination, - "get key state with endorsements", - KeyStateWithEndorsementsAndValidations_.getDefaultInstance()), - t -> failedMajority(result, - maxCount( - gathered))); + new RingIterator<>(operationsFrequency, context, member, scheduler, dhtComms).iterate(digest, null, + (link, r) -> link.getKeyStateWithEndorsementsAndValidations( + coordinates), + () -> failedMajority( + result, + maxCount(gathered)), + (tally, futureSailor, destination) -> read( + result, gathered, tally, + futureSailor, digest, + isTimedOut, destination, + "get key state with endorsements", + KeyStateWithEndorsementsAndValidations_.getDefaultInstance()), + t -> failedMajority( + result, + maxCount(gathered))); try { return result.get(); } catch 
(InterruptedException e) { @@ -715,25 +728,25 @@ public Validations getValidations(EventCoords coordinates) { if (identifier == null) { return completeIt(Validations.getDefaultInstance()); } - Instant timedOut = Instant.now().plus(timeout); + Instant timedOut = Instant.now().plus(operationTimeout); Supplier isTimedOut = () -> Instant.now().isAfter(timedOut); var result = new CompletableFuture(); HashMultiset gathered = HashMultiset.create(); - new RingIterator<>(frequency, context, member, scheduler, dhtComms).iterate(identifier, null, - (link, r) -> link.getValidations( - coordinates), - () -> failedMajority(result, - maxCount( - gathered)), - (tally, futureSailor, destination) -> read( - result, gathered, tally, - futureSailor, identifier, - isTimedOut, destination, - "get validations", - Validations.getDefaultInstance()), - t -> failedMajority(result, - maxCount( - gathered))); + new RingIterator<>(operationsFrequency, context, member, scheduler, dhtComms).iterate(identifier, null, + (link, r) -> link.getValidations( + coordinates), + () -> failedMajority( + result, + maxCount(gathered)), + (tally, futureSailor, destination) -> read( + result, gathered, tally, + futureSailor, identifier, + isTimedOut, destination, + "get validations", + Validations.getDefaultInstance()), + t -> failedMajority( + result, + maxCount(gathered))); try { return result.get(); } catch (InterruptedException e) { @@ -744,6 +757,20 @@ public Validations getValidations(EventCoords coordinates) { } } + public Verifiers getVerifiers() { + return new Verifiers() { + @Override + public Optional verifierFor(EventCoordinates coordinates) { + return verifierFor(coordinates.getIdentifier()); + } + + @Override + public Optional verifierFor(Identifier identifier) { + return Optional.of(new KerlVerifier(identifier, asKERL())); + } + }; + } + public Entry max(HashMultiset gathered) { return gathered.entrySet().stream().max(Ordering.natural().onResultOf(Multiset.Entry::getCount)).orElse(null); } @@ -800,6 +827,8 @@ private void completeIt(CompletableFuture result, HashMultiset gathere } private boolean failedMajority(CompletableFuture result, int maxAgree) { + log.error("Unable to achieve majority read, max: {} required: {} on: {}", maxAgree, context.majority(), + member.getId()); return result.completeExceptionally(new CompletionException( "Unable to achieve majority read, max: " + maxAgree + " required: " + context.majority() + " on: " + member.getId())); @@ -883,7 +912,9 @@ private boolean read(CompletableFuture result, HashMultiset gathered, var max = max(gathered); if (max != null) { tally.set(max.getCount()); - if (max.getCount() > context.toleranceLevel()) { + // If there is only one active member in our context, it's us. + final var majority = tally.get() >= (context.activeCount() == 1 ? 
1 : context.majority()); + if (majority) { result.complete(max.getElement()); log.debug("Majority: {} achieved: {}: {} on: {}", max.getCount(), action, identifier, member.getId()); @@ -907,23 +938,29 @@ private void reconcile(Optional result, if (!result.isEmpty()) { try { Update update = result.get(); - log.trace("Received: {} events in interval reconciliation from: {} on: {}", update.getEventsCount(), - destination.member().getId(), member.getId()); - kerlSpace.update(update.getEventsList(), kerl); + if (update.getEventsCount() > 0) { + reconcileLog.trace("Received: {} events in interval reconciliation from: {} on: {}", + update.getEventsCount(), destination.member().getId(), member.getId()); + kerlSpace.update(update.getEventsList(), kerl); + } } catch (NoSuchElementException e) { - log.debug("null interval reconciliation with {} : {} on: {}", destination.member().getId(), - member.getId(), e.getCause()); + reconcileLog.debug("null interval reconciliation with {} : {} on: {}", destination.member().getId(), + member.getId(), e.getCause()); } } if (started.get()) { - scheduler.schedule(() -> reconcile(scheduler, duration), duration.toMillis(), TimeUnit.MILLISECONDS); + scheduler.schedule(() -> Thread.ofVirtual().start(Utils.wrapped(() -> reconcile(scheduler, duration), log)), + duration.toMillis(), TimeUnit.MILLISECONDS); } } private Update reconcile(ReconciliationService link, Integer ring) { + if (member.equals(link.getMember())) { + return null; + } CombinedIntervals keyIntervals = keyIntervals(); - log.trace("Interval reconciliation on ring: {} with: {} on: {} intervals: {}", ring, link.getMember(), - member.getId(), keyIntervals); + reconcileLog.trace("Interval reconciliation on ring: {} with: {} on: {} intervals: {}", ring, + link.getMember().getId(), member.getId(), keyIntervals); return link.reconcile(Intervals.newBuilder() .setRing(ring) .addAllIntervals(keyIntervals.toIntervals()) @@ -935,8 +972,10 @@ private void reconcile(ScheduledExecutorService scheduler, Duration duration) { if (!started.get()) { return; } - reconcile.execute((link, ring) -> reconcile(link, ring), - (futureSailor, destination) -> reconcile(futureSailor, destination, scheduler, duration)); + Thread.ofVirtual() + .start(() -> reconcile.execute((link, ring) -> reconcile(link, ring), + (futureSailor, destination) -> reconcile(futureSailor, destination, + scheduler, duration))); } @@ -964,10 +1003,7 @@ private boolean valid(Digest from, int ring) { if (successor == null) { return false; } - if (!successor.equals(member)) { - return false; - } - return true; + return successor.equals(member); } private DelegatedKERL wrap(ClosableKERL k) { @@ -1018,13 +1054,19 @@ private class Reconcile implements Reconciliation { public Update reconcile(Intervals intervals, Digest from) { var ring = intervals.getRing(); if (!valid(from, ring)) { + reconcileLog.trace("Invalid reconcile from: {} ring: {} on: {}", from, ring, member.getId()); return Update.getDefaultInstance(); } + reconcileLog.trace("Reconcile from: {} ring: {} on: {}", from, ring, member.getId()); try (var k = kerlPool.create()) { final var builder = KerlDHT.this.kerlSpace.reconcile(intervals, k); CombinedIntervals keyIntervals = keyIntervals(); builder.addAllIntervals(keyIntervals.toIntervals()) .setHave(kerlSpace.populate(Entropy.nextBitsStreamLong(), keyIntervals, fpr)); + if (builder.getEventsCount() > 0) { + reconcileLog.trace("Reconcile for: {} ring: {} count: {} on: {}", from, ring, + builder.getEventsCount(), member.getId()); + } return builder.build(); } 
catch (IOException | SQLException e) { throw new IllegalStateException("Cannot acquire KERL", e); @@ -1043,12 +1085,6 @@ public void update(Updating update, Digest from) { private class Service implements ProtoKERLService { - @Override - public Validations getValidations(EventCoords coordinates) { - log.trace("get validations for coordinates on: {}", member.getId()); - return complete(k -> k.getValidations(coordinates)); - } - @Override public List append(KERL_ kerl_) { log.info("appending kerl on: {}", member.getId()); @@ -1142,5 +1178,11 @@ public KeyStateWithEndorsementsAndValidations_ getKeyStateWithEndorsementsAndVal .build(); }); } + + @Override + public Validations getValidations(EventCoords coordinates) { + log.trace("get validations for coordinates on: {}", member.getId()); + return complete(k -> k.getValidations(coordinates)); + } } } diff --git a/thoth/src/main/java/com/salesforce/apollo/thoth/KerlSpace.java b/thoth/src/main/java/com/salesforce/apollo/thoth/KerlSpace.java index 0065c7ddc5..f45cb8e897 100644 --- a/thoth/src/main/java/com/salesforce/apollo/thoth/KerlSpace.java +++ b/thoth/src/main/java/com/salesforce/apollo/thoth/KerlSpace.java @@ -8,23 +8,23 @@ package com.salesforce.apollo.thoth; import com.google.protobuf.InvalidProtocolBufferException; -import com.salesfoce.apollo.stereotomy.event.proto.*; -import com.salesfoce.apollo.thoth.proto.Intervals; -import com.salesfoce.apollo.thoth.proto.Update; -import com.salesfoce.apollo.cryptography.proto.Biff; -import com.salesfoce.apollo.cryptography.proto.Digeste; +import com.salesforce.apollo.bloomFilters.BloomFilter; +import com.salesforce.apollo.bloomFilters.BloomFilter.DigestBloomFilter; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.cryptography.JohnHancock; +import com.salesforce.apollo.cryptography.proto.Biff; +import com.salesforce.apollo.cryptography.proto.Digeste; import com.salesforce.apollo.stereotomy.DigestKERL; import com.salesforce.apollo.stereotomy.EventCoordinates; import com.salesforce.apollo.stereotomy.KERL; import com.salesforce.apollo.stereotomy.event.KeyEvent; +import com.salesforce.apollo.stereotomy.event.proto.*; import com.salesforce.apollo.stereotomy.event.protobuf.AttachmentEventImpl; import com.salesforce.apollo.stereotomy.event.protobuf.ProtobufEventFactory; import com.salesforce.apollo.stereotomy.identifier.Identifier; -import com.salesforce.apollo.bloomFilters.BloomFilter; -import com.salesforce.apollo.bloomFilters.BloomFilter.DigestBloomFilter; +import com.salesforce.apollo.thoth.proto.Intervals; +import com.salesforce.apollo.thoth.proto.Update; import org.h2.jdbcx.JdbcConnectionPool; import org.jooq.DSLContext; import org.jooq.Record1; @@ -56,8 +56,8 @@ * @author hal.hildebrand */ public class KerlSpace { - private static final Logger log = LoggerFactory.getLogger(KerlSpace.class); - private final JdbcConnectionPool connectionPool; + private static final Logger log = LoggerFactory.getLogger(KerlSpace.class); + private final JdbcConnectionPool connectionPool; public KerlSpace(JdbcConnectionPool connectionPool) { this.connectionPool = connectionPool; @@ -75,10 +75,10 @@ public static void upsert(DSLContext dsl, EventCoords coordinates, Attachment at id = dsl.insertInto(PENDING_COORDINATES) .set(PENDING_COORDINATES.DIGEST, coordinates.getDigest().toByteArray()) .set(PENDING_COORDINATES.IDENTIFIER, - dsl.select(IDENTIFIER.ID).from(IDENTIFIER).where(IDENTIFIER.PREFIX.eq(identBytes))) + 
dsl.select(IDENTIFIER.ID).from(IDENTIFIER).where(IDENTIFIER.PREFIX.eq(identBytes))) .set(PENDING_COORDINATES.ILK, coordinates.getIlk()) .set(PENDING_COORDINATES.SEQUENCE_NUMBER, - ULong.valueOf(coordinates.getSequenceNumber()).toBigInteger()) + ULong.valueOf(coordinates.getSequenceNumber()).toBigInteger()) .returningResult(PENDING_COORDINATES.ID) .fetchOne(); } catch (DataAccessException e) { @@ -89,8 +89,8 @@ public static void upsert(DSLContext dsl, EventCoords coordinates, Attachment at .on(IDENTIFIER.PREFIX.eq(coordinates.getIdentifier().toByteArray())) .where(PENDING_COORDINATES.IDENTIFIER.eq(IDENTIFIER.ID)) .and(PENDING_COORDINATES.DIGEST.eq(coordinates.getDigest().toByteArray())) - .and(PENDING_COORDINATES.SEQUENCE_NUMBER.eq(ULong.valueOf(coordinates.getSequenceNumber()) - .toBigInteger())) + .and(PENDING_COORDINATES.SEQUENCE_NUMBER.eq( + ULong.valueOf(coordinates.getSequenceNumber()).toBigInteger())) .and(PENDING_COORDINATES.ILK.eq(coordinates.getIlk())) .fetchOne(); } @@ -106,46 +106,45 @@ public static void upsert(DSLContext context, KeyEvent event, DigestAlgorithm di final var identBytes = event.getIdentifier().toIdent().toByteArray(); context.mergeInto(IDENTIFIER) - .using(context.selectOne()) - .on(IDENTIFIER.PREFIX.eq(identBytes)) - .whenNotMatchedThenInsert(IDENTIFIER.PREFIX) - .values(identBytes) - .execute(); + .using(context.selectOne()) + .on(IDENTIFIER.PREFIX.eq(identBytes)) + .whenNotMatchedThenInsert(IDENTIFIER.PREFIX) + .values(identBytes) + .execute(); long id; try { id = context.insertInto(PENDING_COORDINATES) - .set(PENDING_COORDINATES.DIGEST, prevCoords.getDigest().toDigeste().toByteArray()) - .set(PENDING_COORDINATES.IDENTIFIER, - context.select(IDENTIFIER.ID).from(IDENTIFIER).where(IDENTIFIER.PREFIX.eq(identBytes))) - .set(PENDING_COORDINATES.ILK, event.getIlk()) - .set(PENDING_COORDINATES.SEQUENCE_NUMBER, event.getSequenceNumber().toBigInteger()) - .returningResult(PENDING_COORDINATES.ID) - .fetchOne() - .value1(); + .set(PENDING_COORDINATES.DIGEST, prevCoords.getDigest().toDigeste().toByteArray()) + .set(PENDING_COORDINATES.IDENTIFIER, + context.select(IDENTIFIER.ID).from(IDENTIFIER).where(IDENTIFIER.PREFIX.eq(identBytes))) + .set(PENDING_COORDINATES.ILK, event.getIlk()) + .set(PENDING_COORDINATES.SEQUENCE_NUMBER, event.getSequenceNumber().toBigInteger()) + .returningResult(PENDING_COORDINATES.ID) + .fetchOne() + .value1(); } catch (DataAccessException e) { // Already exists var coordinates = event.getCoordinates(); id = context.select(PENDING_COORDINATES.ID) - .from(PENDING_COORDINATES) - .join(IDENTIFIER) - .on(IDENTIFIER.PREFIX.eq(coordinates.getIdentifier().toIdent().toByteArray())) - .where(PENDING_COORDINATES.IDENTIFIER.eq(IDENTIFIER.ID)) - .and(PENDING_COORDINATES.DIGEST.eq(coordinates.getDigest().toDigeste().toByteArray())) - .and(PENDING_COORDINATES.SEQUENCE_NUMBER.eq(coordinates.getSequenceNumber().toBigInteger())) - .and(PENDING_COORDINATES.ILK.eq(coordinates.getIlk())) - .fetchOne() - .value1(); + .from(PENDING_COORDINATES) + .join(IDENTIFIER) + .on(IDENTIFIER.PREFIX.eq(coordinates.getIdentifier().toIdent().toByteArray())) + .where(PENDING_COORDINATES.IDENTIFIER.eq(IDENTIFIER.ID)) + .and(PENDING_COORDINATES.DIGEST.eq(coordinates.getDigest().toDigeste().toByteArray())) + .and(PENDING_COORDINATES.SEQUENCE_NUMBER.eq(coordinates.getSequenceNumber().toBigInteger())) + .and(PENDING_COORDINATES.ILK.eq(coordinates.getIlk())) + .fetchOne() + .value1(); } final var digest = event.hash(digestAlgorithm); try { context.insertInto(PENDING_EVENT) - 
.set(PENDING_EVENT.COORDINATES, id) - .set(PENDING_EVENT.DIGEST, digest.toDigeste().toByteArray()) - .set(PENDING_EVENT.EVENT, event.getBytes()) - .execute(); + .set(PENDING_EVENT.COORDINATES, id) + .set(PENDING_EVENT.DIGEST, digest.toDigeste().toByteArray()) + .set(PENDING_EVENT.EVENT, event.getBytes()) + .execute(); } catch (DataAccessException e) { - return; } } @@ -158,11 +157,11 @@ public static void upsert(DSLContext dsl, Validations validations) { try { dsl.mergeInto(IDENTIFIER) - .using(dsl.selectOne()) - .on(IDENTIFIER.PREFIX.eq(identBytes)) - .whenNotMatchedThenInsert(IDENTIFIER.PREFIX) - .values(identBytes) - .execute(); + .using(dsl.selectOne()) + .on(IDENTIFIER.PREFIX.eq(identBytes)) + .whenNotMatchedThenInsert(IDENTIFIER.PREFIX) + .values(identBytes) + .execute(); } catch (DataAccessException e) { log.trace("Duplicate inserting identifier: {}", logIdentifier); } @@ -172,13 +171,13 @@ public static void upsert(DSLContext dsl, Validations validations) { id = dsl.insertInto(PENDING_COORDINATES) .set(PENDING_COORDINATES.DIGEST, coordinates.getDigest().toByteArray()) .set(PENDING_COORDINATES.IDENTIFIER, - dsl.select(IDENTIFIER.ID).from(IDENTIFIER).where(IDENTIFIER.PREFIX.eq(identBytes))) + dsl.select(IDENTIFIER.ID).from(IDENTIFIER).where(IDENTIFIER.PREFIX.eq(identBytes))) .set(PENDING_COORDINATES.ILK, coordinates.getIlk()) .set(PENDING_COORDINATES.SEQUENCE_NUMBER, - ULong.valueOf(coordinates.getSequenceNumber()).toBigInteger()) + ULong.valueOf(coordinates.getSequenceNumber()).toBigInteger()) .returningResult(PENDING_COORDINATES.ID) .fetchOne(); - log.trace("Id: {} for: {}", id, logCoords); + log.trace("Id: {} for: {}", id.value1(), logCoords); } catch (DataAccessException e) { log.trace("access exception for: {}", logCoords, e); // Already exists @@ -188,8 +187,8 @@ public static void upsert(DSLContext dsl, Validations validations) { .on(IDENTIFIER.PREFIX.eq(coordinates.getIdentifier().toByteArray())) .where(PENDING_COORDINATES.IDENTIFIER.eq(IDENTIFIER.ID)) .and(PENDING_COORDINATES.DIGEST.eq(coordinates.getDigest().toByteArray())) - .and(PENDING_COORDINATES.SEQUENCE_NUMBER.eq(ULong.valueOf(coordinates.getSequenceNumber()) - .toBigInteger())) + .and(PENDING_COORDINATES.SEQUENCE_NUMBER.eq( + ULong.valueOf(coordinates.getSequenceNumber()).toBigInteger())) .and(PENDING_COORDINATES.ILK.eq(coordinates.getIlk())) .fetchOne(); } @@ -204,21 +203,19 @@ public static void upsert(DSLContext dsl, Validations validations) { } /** - * Answer the bloom filter encoding the key events contained within the combined - * intervals + * Answer the bloom filter encoding the key events contained within the combined intervals * * @param seed - the seed for the bloom filter's hash generator - * @param intervals - the combined intervals containing the identifier location - * hashes. + * @param intervals - the combined intervals containing the identifier location hashes. 
* @param fpr - the false positive rate for the bloom filter - * @return the bloom filter of Digests bounded by the identifier location hash - * intervals + * @return the bloom filter of Digests bounded by the identifier location hash intervals */ public Biff populate(long seed, CombinedIntervals intervals, double fpr) { - DigestBloomFilter bff = new DigestBloomFilter(seed, cardinality(), fpr); + DigestBloomFilter bff = new DigestBloomFilter(seed, Math.max(cardinality(), 100), fpr); try (var connection = connectionPool.getConnection()) { var dsl = DSL.using(connection); eventDigestsIn(intervals, dsl).forEach(d -> { + log.trace("Adding reconcile digest: {}", d); bff.add(d); }); } catch (SQLException e) { @@ -230,11 +227,10 @@ public Biff populate(long seed, CombinedIntervals intervals, double fpr) { /** * Reconcile the intervals for our partner * - * @param intervals - the relevant intervals of identifiers and the event - * digests of these identifiers the partner already have + * @param intervals - the relevant intervals of identifiers and the event digests of these identifiers the partner + * already have * @param kerl - * @return the Update.Builder of missing key events, based on the supplied - * intervals + * @return the Update.Builder of missing key events, based on the supplied intervals */ public Update.Builder reconcile(Intervals intervals, DigestKERL kerl) { var biff = BloomFilter.from(intervals.getHave()); @@ -242,19 +238,19 @@ public Update.Builder reconcile(Intervals intervals, DigestKERL kerl) { try (var connection = connectionPool.getConnection()) { var dsl = DSL.using(connection); intervals.getIntervalsList() - .stream() - .map(i -> new KeyInterval(i)) - .flatMap(i -> eventDigestsIn(i, dsl)) - .filter(d -> !biff.contains(d)) - .map(d -> event(d, dsl, kerl)) - .filter(ke -> ke != null) - .forEach(ke -> { - update.addEvents(ke); - }); + .stream() + .map(KeyInterval::new) + .flatMap(i -> eventDigestsIn(i, dsl)) + .peek(d -> log.trace("reconcile digest: {}", d)) + .filter(d -> !biff.contains(d)) + .peek(d -> log.trace("filtered reconcile digest: {}", d)) + .map(d -> event(d, dsl, kerl)) + .filter(ke -> ke != null) + .forEach(update::addEvents); } catch (SQLException e) { log.error("Unable to provide estimated cardinality, cannot acquire JDBC connection", e); throw new IllegalStateException("Unable to provide estimated cardinality, cannot acquire JDBC connection", - e); + e); } return update; } @@ -265,11 +261,13 @@ public Update.Builder reconcile(Intervals intervals, DigestKERL kerl) { * @param events * @param kerl */ - public void update(List events, KERL kerl) { + public void update(List events, KERL.AppendKERL kerl) { if (events.isEmpty()) { + log.trace("No events to update"); return; } + log.trace("Events to update: {}", events.size()); final var digestAlgorithm = kerl.getDigestAlgorithm(); try (var connection = connectionPool.getConnection()) { @@ -301,59 +299,62 @@ private int cardinality() { return dsl.fetchCount(dsl.selectFrom(IDENTIFIER)); } catch (SQLException e) { log.error("Unable to provide estimated cardinality, cannot acquire JDBC connection", e); - throw new IllegalStateException("Unable to provide estimated cardinality, cannot acquire JDBC connection", - e); + return 0; } } - private void commitPending(DSLContext context, KERL kerl) { + private void commitPending(DSLContext context, KERL.AppendKERL kerl) { + log.trace("Commit pending"); context.select(PENDING_COORDINATES.ID, PENDING_EVENT.EVENT, PENDING_COORDINATES.ILK) - .from(PENDING_EVENT) - 
.join(PENDING_COORDINATES) - .on(PENDING_COORDINATES.ID.eq(PENDING_EVENT.COORDINATES)) - .join(EVENT) - .on(EVENT.DIGEST.eq(PENDING_COORDINATES.DIGEST)) - .orderBy(PENDING_COORDINATES.SEQUENCE_NUMBER) - .fetchStream() - .forEach(r -> { - KeyEvent event = ProtobufEventFactory.toKeyEvent(r.value2(), r.value3()); - EventCoordinates coordinates = event.getCoordinates(); - if (coordinates != null) { - context.select(PENDING_ATTACHMENT.ATTACHMENT) - .from(PENDING_ATTACHMENT) - .where(PENDING_ATTACHMENT.COORDINATES.eq(r.value1())) - .stream() - .forEach(bytes -> { - try { - Attachment attach = Attachment.parseFrom(bytes.value1()); - kerl.append(Collections.singletonList(new AttachmentEventImpl(AttachmentEvent.newBuilder() - .setCoordinates(coordinates.toEventCoords()) - .setAttachment(attach) - .build()))); - } catch (InvalidProtocolBufferException e) { - log.error("Cannot deserialize attachment", e); - } - }); - context.select(PENDING_VALIDATIONS.VALIDATIONS) - .from(PENDING_VALIDATIONS) - .where(PENDING_VALIDATIONS.COORDINATES.eq(r.value1())) - .stream() - .forEach(bytes -> { - try { - Validations attach = Validations.parseFrom(bytes.value1()); - kerl.appendValidations(coordinates, - attach.getValidationsList() - .stream() - .collect(Collectors.toMap(v -> EventCoordinates.from(v.getValidator()), - v -> JohnHancock.from(v.getSignature())))); - } catch (InvalidProtocolBufferException e) { - log.error("Cannot deserialize validation", e); - } - }); - kerl.append(event); - } - context.deleteFrom(PENDING_COORDINATES).where(PENDING_COORDINATES.ID.eq(r.value1())).execute(); - }); + .from(PENDING_EVENT) + .join(PENDING_COORDINATES) + .on(PENDING_COORDINATES.ID.eq(PENDING_EVENT.COORDINATES)) + .join(EVENT) + .on(EVENT.DIGEST.eq(PENDING_COORDINATES.DIGEST)) + .orderBy(PENDING_COORDINATES.SEQUENCE_NUMBER) + .fetchStream() + .forEach(r -> { + KeyEvent event = ProtobufEventFactory.toKeyEvent(r.value2(), r.value3()); + EventCoordinates coordinates = event.getCoordinates(); + if (coordinates != null) { + context.select(PENDING_ATTACHMENT.ATTACHMENT) + .from(PENDING_ATTACHMENT) + .where(PENDING_ATTACHMENT.COORDINATES.eq(r.value1())) + .stream() + .forEach(bytes -> { + try { + Attachment attach = Attachment.parseFrom(bytes.value1()); + kerl.append(Collections.singletonList(new AttachmentEventImpl( + AttachmentEvent.newBuilder() + .setCoordinates(coordinates.toEventCoords()) + .setAttachment(attach) + .build()))); + } catch (InvalidProtocolBufferException e) { + log.error("Cannot deserialize attachment", e); + } + }); + context.select(PENDING_VALIDATIONS.VALIDATIONS) + .from(PENDING_VALIDATIONS) + .where(PENDING_VALIDATIONS.COORDINATES.eq(r.value1())) + .stream() + .forEach(bytes -> { + try { + Validations attach = Validations.parseFrom(bytes.value1()); + kerl.appendValidations(coordinates, attach.getValidationsList() + .stream() + .collect(Collectors.toMap( + v -> EventCoordinates.from( + v.getValidator()), + v -> JohnHancock.from( + v.getSignature())))); + } catch (InvalidProtocolBufferException e) { + log.error("Cannot deserialize validation", e); + } + }); + kerl.append(event); + } + context.deleteFrom(PENDING_COORDINATES).where(PENDING_COORDINATES.ID.eq(r.value1())).execute(); + }); } private KeyEventWithAttachmentAndValidations_ event(Digest d, DSLContext dsl, DigestKERL kerl) { @@ -367,17 +368,15 @@ private KeyEventWithAttachmentAndValidations_ event(Digest d, DSLContext dsl, Di builder.setAttachment(a.toAttachemente()); Map vs = kerl.getValidations(coordinates); var v = Validations.newBuilder() - 
.setCoordinates(coordinates.toEventCoords()) - .addAllValidations(vs.entrySet() - .stream() - .map(e -> Validation_.newBuilder() - .setValidator(e.getKey() - .toEventCoords()) - .setSignature(e.getValue() - .toSig()) - .build()) - .toList()) - .build(); + .setCoordinates(coordinates.toEventCoords()) + .addAllValidations(vs.entrySet() + .stream() + .map(e -> Validation_.newBuilder() + .setValidator(e.getKey().toEventCoords()) + .setSignature(e.getValue().toSig()) + .build()) + .toList()) + .build(); builder.setValidations(v); builder.setEvent(event.toKeyEvent_()); return builder.build(); @@ -389,42 +388,43 @@ private Stream eventDigestsIn(CombinedIntervals intervals, DSLContext ds private Stream eventDigestsIn(KeyInterval interval, DSLContext dsl) { return Stream.concat(dsl.select(EVENT.DIGEST) - .from(EVENT) - .join(COORDINATES) - .on(EVENT.COORDINATES.eq(COORDINATES.ID)) - .join(IDENTIFIER) - .on(COORDINATES.IDENTIFIER.eq(IDENTIFIER.ID)) - .join(IDENTIFIER_LOCATION_HASH) - .on(IDENTIFIER.ID.eq(IDENTIFIER_LOCATION_HASH.IDENTIFIER)) - .where(IDENTIFIER_LOCATION_HASH.DIGEST.ge(interval.getBegin().getBytes())) - .and(IDENTIFIER_LOCATION_HASH.DIGEST.le(interval.getEnd().getBytes())) - .stream() - .map(r -> { - try { - return Digest.from(Digeste.parseFrom(r.value1())); - } catch (InvalidProtocolBufferException e) { - return null; - } - }) - .filter(d -> d != null), - dsl.select(PENDING_EVENT.DIGEST) - .from(PENDING_EVENT) - .join(PENDING_COORDINATES) - .on(PENDING_EVENT.COORDINATES.eq(PENDING_COORDINATES.ID)) - .join(IDENTIFIER) - .on(PENDING_COORDINATES.IDENTIFIER.eq(IDENTIFIER.ID)) - .join(IDENTIFIER_LOCATION_HASH) - .on(IDENTIFIER.ID.eq(IDENTIFIER_LOCATION_HASH.IDENTIFIER)) - .where(IDENTIFIER_LOCATION_HASH.DIGEST.ge(interval.getBegin().getBytes())) - .and(IDENTIFIER_LOCATION_HASH.DIGEST.le(interval.getEnd().getBytes())) - .stream() - .map(r -> { - try { - return Digest.from(Digeste.parseFrom(r.value1())); - } catch (InvalidProtocolBufferException e) { - return null; - } - }) - .filter(d -> d != null)); + .from(EVENT) + .join(COORDINATES) + .on(EVENT.COORDINATES.eq(COORDINATES.ID)) + .join(IDENTIFIER) + .on(COORDINATES.IDENTIFIER.eq(IDENTIFIER.ID)) + .join(IDENTIFIER_LOCATION_HASH) + .on(IDENTIFIER.ID.eq(IDENTIFIER_LOCATION_HASH.IDENTIFIER)) + .where(IDENTIFIER_LOCATION_HASH.DIGEST.ge(interval.getBegin().getBytes())) + .and(IDENTIFIER_LOCATION_HASH.DIGEST.le(interval.getEnd().getBytes())) + .stream() + .map(r -> { + try { + return Digest.from(Digeste.parseFrom(r.value1())); + } catch (InvalidProtocolBufferException e) { + return null; + } + }) + .filter(d -> d != null), dsl.select(PENDING_EVENT.DIGEST) + .from(PENDING_EVENT) + .join(PENDING_COORDINATES) + .on(PENDING_EVENT.COORDINATES.eq(PENDING_COORDINATES.ID)) + .join(IDENTIFIER) + .on(PENDING_COORDINATES.IDENTIFIER.eq(IDENTIFIER.ID)) + .join(IDENTIFIER_LOCATION_HASH) + .on(IDENTIFIER.ID.eq(IDENTIFIER_LOCATION_HASH.IDENTIFIER)) + .where(IDENTIFIER_LOCATION_HASH.DIGEST.ge( + interval.getBegin().getBytes())) + .and(IDENTIFIER_LOCATION_HASH.DIGEST.le( + interval.getEnd().getBytes())) + .stream() + .map(r -> { + try { + return Digest.from(Digeste.parseFrom(r.value1())); + } catch (InvalidProtocolBufferException e) { + return null; + } + }) + .filter(d -> d != null)); } } diff --git a/thoth/src/main/java/com/salesforce/apollo/thoth/KeyInterval.java b/thoth/src/main/java/com/salesforce/apollo/thoth/KeyInterval.java index 8d2147e27c..60147353e1 100644 --- a/thoth/src/main/java/com/salesforce/apollo/thoth/KeyInterval.java +++ 
b/thoth/src/main/java/com/salesforce/apollo/thoth/KeyInterval.java @@ -7,14 +7,13 @@ package com.salesforce.apollo.thoth; -import java.util.function.Predicate; - -import com.salesfoce.apollo.thoth.proto.Interval; +import com.salesforce.apollo.thoth.proto.Interval; import com.salesforce.apollo.cryptography.Digest; +import java.util.function.Predicate; + /** * @author hal.hildebrand - * */ public class KeyInterval implements Predicate { private final Digest begin; @@ -30,11 +29,6 @@ public KeyInterval(Interval interval) { this(Digest.from(interval.getStart()), Digest.from(interval.getEnd())); } - @Override - public boolean test(Digest t) { - return begin.compareTo(t) > 0 && end.compareTo(t) > 0; - } - public Digest getBegin() { return begin; } @@ -43,6 +37,11 @@ public Digest getEnd() { return end; } + @Override + public boolean test(Digest t) { + return begin.compareTo(t) > 0 && end.compareTo(t) > 0; + } + public Interval toInterval() { return Interval.newBuilder().setStart(begin.toDigeste()).setEnd(end.toDigeste()).build(); } diff --git a/thoth/src/main/java/com/salesforce/apollo/thoth/LoggingOutputStream.java b/thoth/src/main/java/com/salesforce/apollo/thoth/LoggingOutputStream.java index 4b7e3b8008..be365bdbec 100644 --- a/thoth/src/main/java/com/salesforce/apollo/thoth/LoggingOutputStream.java +++ b/thoth/src/main/java/com/salesforce/apollo/thoth/LoggingOutputStream.java @@ -7,25 +7,19 @@ package com.salesforce.apollo.thoth; +import org.slf4j.Logger; + import java.io.ByteArrayOutputStream; import java.io.OutputStream; -import org.slf4j.Logger; - /** * @author hal.hildebrand - * */ public class LoggingOutputStream extends OutputStream { - public enum LogLevel { - DEBUG, ERROR, INFO, TRACE, WARN, - } - private final ByteArrayOutputStream baos = new ByteArrayOutputStream(1000); private final LogLevel level; - - private final Logger logger; + private final Logger logger; public LoggingOutputStream(Logger logger, LogLevel level) { this.logger = logger; @@ -60,4 +54,8 @@ public void write(int b) { } } + public enum LogLevel { + DEBUG, ERROR, INFO, TRACE, WARN, + } + } diff --git a/thoth/src/main/java/com/salesforce/apollo/thoth/Maat.java b/thoth/src/main/java/com/salesforce/apollo/thoth/Maat.java index 7ba584517b..ad514cf797 100644 --- a/thoth/src/main/java/com/salesforce/apollo/thoth/Maat.java +++ b/thoth/src/main/java/com/salesforce/apollo/thoth/Maat.java @@ -36,13 +36,13 @@ * @author hal.hildebrand */ public class Maat extends DelegatedKERL { - private static Logger log = LoggerFactory.getLogger(Maat.class); + private static final Logger log = LoggerFactory.getLogger(Maat.class); private final Context context; private final KERL validators; - public Maat(Context context, KERL delegate, KERL validators) { + public Maat(Context context, AppendKERL delegate, KERL validators) { super(delegate); this.context = context; this.validators = validators; @@ -50,6 +50,7 @@ public Maat(Context context, KERL delegate, KERL validators) { @Override public KeyState append(KeyEvent event) { + log.trace("Append: {}", event); var l = append(Collections.singletonList(event), Collections.emptyList()); return l.isEmpty() ? 
null : l.get(0); } diff --git a/thoth/src/main/java/com/salesforce/apollo/thoth/Publisher.java b/thoth/src/main/java/com/salesforce/apollo/thoth/Publisher.java index 90ad3266c6..c3358d1a6f 100644 --- a/thoth/src/main/java/com/salesforce/apollo/thoth/Publisher.java +++ b/thoth/src/main/java/com/salesforce/apollo/thoth/Publisher.java @@ -6,10 +6,10 @@ */ package com.salesforce.apollo.thoth; -import com.salesfoce.apollo.stereotomy.event.proto.AttachmentEvent; -import com.salesfoce.apollo.stereotomy.event.proto.KERL_; -import com.salesfoce.apollo.stereotomy.event.proto.KeyEvent_; -import com.salesfoce.apollo.stereotomy.event.proto.Validations; +import com.salesforce.apollo.stereotomy.event.proto.AttachmentEvent; +import com.salesforce.apollo.stereotomy.event.proto.KERL_; +import com.salesforce.apollo.stereotomy.event.proto.KeyEvent_; +import com.salesforce.apollo.stereotomy.event.proto.Validations; import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.archipelago.Router.ServiceRouting; import com.salesforce.apollo.archipelago.RouterImpl.CommonCommunications; @@ -30,16 +30,17 @@ public class Publisher implements ProtoEventObserver { private final CommonCommunications comms; - private final Digest context; - private final ProtoKERLAdapter kerl; - private final EventObserver service; + private final Digest context; + private final ProtoKERLAdapter kerl; + private final EventObserver service; + public Publisher(SigningMember member, ProtoKERLAdapter kerl, Router router, Digest context) { this.kerl = kerl; this.context = context; service = new Service(); comms = router.create(member, context, service, service.getClass().getSimpleName(), - r -> new EventObserverServer(r, router.getClientIdentityProvider(), null), null, - EventObserverClient.getLocalLoopback(this, member)); + r -> new EventObserverServer(r, router.getClientIdentityProvider(), null), null, + EventObserverClient.getLocalLoopback(this, member)); } @Override @@ -78,8 +79,7 @@ public void publishAttachments(List attachments, Digest from) { } @Override - public void publishEvents(List events, List validations, - Digest from) { + public void publishEvents(List events, List validations, Digest from) { Publisher.this.publishEvents(events, validations); } } diff --git a/thoth/src/main/java/com/salesforce/apollo/thoth/Thoth.java b/thoth/src/main/java/com/salesforce/apollo/thoth/Thoth.java index f2dd3b7747..04e4c86f27 100644 --- a/thoth/src/main/java/com/salesforce/apollo/thoth/Thoth.java +++ b/thoth/src/main/java/com/salesforce/apollo/thoth/Thoth.java @@ -21,6 +21,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; /** @@ -29,14 +30,23 @@ * @author hal.hildebrand */ public class Thoth { - private static final Logger log = LoggerFactory.getLogger(Thoth.class); - private final Stereotomy stereotomy; - private volatile SelfAddressingIdentifier controller; - private volatile ControlledIdentifier identifier; - private volatile Consumer pending; + private static final Logger log = LoggerFactory.getLogger( + Thoth.class); + private final Stereotomy stereotomy; + private final Consumer> onInception; + private volatile SelfAddressingIdentifier controller; + private volatile ControlledIdentifier identifier; + private volatile Consumer pending; + private AtomicBoolean initialized = new AtomicBoolean(); public Thoth(Stereotomy stereotomy) { + this(stereotomy, identifier -> { + }); + } + + public Thoth(Stereotomy stereotomy, Consumer> 
onInception) { this.stereotomy = stereotomy; + this.onInception = onInception; } public void commit(EventCoordinates coords) { @@ -50,14 +60,18 @@ public void commit(EventCoordinates coords) { } public SelfAddressingIdentifier identifier() { - if (identifier == null) { + final var current = identifier; + if (current == null) { throw new IllegalStateException("Identifier has not been established"); } - return identifier.getIdentifier(); + return current.getIdentifier(); } public DelegatedInceptionEvent inception(SelfAddressingIdentifier controller, IdentifierSpecification.Builder specification) { + if (initialized.get()) { + throw new IllegalStateException("Already initialized for: " + identifier); + } final var inception = stereotomy.newDelegatedIdentifier(controller, specification); pending = inception(inception); return inception; @@ -85,30 +99,33 @@ public DelegatedRotationEvent rotate(RotationSpecification.Builder specification private Consumer inception(DelegatedInceptionEvent incp) { return coordinates -> { - var commitment = ProtobufEventFactory.INSTANCE.attachment(incp, - new AttachmentImpl(Seal.EventSeal.construct(coordinates.getIdentifier(), - coordinates.getDigest(), - coordinates.getSequenceNumber() - .longValue()))); + if (!initialized.compareAndSet(false, true)) { + return; + } + var commitment = ProtobufEventFactory.INSTANCE.attachment(incp, new AttachmentImpl( + Seal.EventSeal.construct(coordinates.getIdentifier(), coordinates.getDigest(), + coordinates.getSequenceNumber().longValue()))); ControlledIdentifier cid = stereotomy.commit(incp, commitment); identifier = cid; controller = (SelfAddressingIdentifier) identifier.getDelegatingIdentifier().get(); pending = null; + if (onInception != null) { + log.info("Notifying inception complete for: {} controller: {}", identifier.getIdentifier(), controller); + onInception.accept(identifier); + } log.info("Created delegated identifier: {} controller: {}", identifier.getIdentifier(), controller); }; } private Consumer rotation(DelegatedRotationEvent rot) { return coordinates -> { - var commitment = ProtobufEventFactory.INSTANCE.attachment(rot, - new AttachmentImpl(Seal.EventSeal.construct(coordinates.getIdentifier(), - coordinates.getDigest(), - coordinates.getSequenceNumber() - .longValue()))); + var commitment = ProtobufEventFactory.INSTANCE.attachment(rot, new AttachmentImpl( + Seal.EventSeal.construct(coordinates.getIdentifier(), coordinates.getDigest(), + coordinates.getSequenceNumber().longValue()))); Void cid = identifier.commit(rot, commitment); pending = null; log.info("Rotated delegated identifier: {} controller: {}", identifier.getCoordinates(), controller, - identifier.getCoordinates()); + identifier.getCoordinates()); }; } } diff --git a/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/ThothServer.java b/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/ThothServer.java new file mode 100644 index 0000000000..39ffe8d1ee --- /dev/null +++ b/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/ThothServer.java @@ -0,0 +1,110 @@ +package com.salesforce.apollo.thoth.grpc; + +import com.google.protobuf.Empty; +import com.salesforce.apollo.stereotomy.event.proto.EventCoords; +import com.salesforce.apollo.stereotomy.event.proto.Ident; +import com.salesforce.apollo.stereotomy.event.proto.InceptionEvent; +import com.salesforce.apollo.stereotomy.event.proto.RotationEvent; +import com.salesforce.apollo.thoth.proto.Thoth_Grpc; +import com.salesforce.apollo.stereotomy.EventCoordinates; +import 
com.salesforce.apollo.stereotomy.event.protobuf.InceptionEventImpl; +import com.salesforce.apollo.stereotomy.event.protobuf.RotationEventImpl; +import com.salesforce.apollo.stereotomy.identifier.Identifier; +import com.salesforce.apollo.stereotomy.identifier.SelfAddressingIdentifier; +import com.salesforce.apollo.stereotomy.identifier.spec.IdentifierSpecification; +import com.salesforce.apollo.stereotomy.identifier.spec.RotationSpecification; +import com.salesforce.apollo.thoth.Thoth; +import io.grpc.stub.StreamObserver; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * @author hal.hildebrand + **/ +public class ThothServer extends Thoth_Grpc.Thoth_ImplBase { + private static final Logger log = LoggerFactory.getLogger( + ThothServer.class); + private final IdentifierSpecification.Builder inception; + private final RotationSpecification.Builder rotation; + private final Thoth thoth; + + public ThothServer(Thoth thoth) { + this(IdentifierSpecification.newBuilder(), RotationSpecification.newBuilder(), thoth); + } + + public ThothServer(IdentifierSpecification.Builder inception, + RotationSpecification.Builder rotation, Thoth thoth) { + this.inception = inception; + this.rotation = rotation; + this.thoth = thoth; + } + + @Override + public void commit(EventCoords request, StreamObserver responseObserver) { + var from = EventCoordinates.from(request); + try { + thoth.commit(from); + responseObserver.onNext(Empty.getDefaultInstance()); + responseObserver.onCompleted(); + } catch (Throwable t) { + log.info("Error committing delegation event: " + from, t); + responseObserver.onError(t); + } + } + + public Thoth getThoth() { + return thoth; + } + + @Override + public void identifier(Empty request, StreamObserver responseObserver) { + try { + var ident = thoth.identifier().toIdent(); + responseObserver.onNext(ident); + responseObserver.onCompleted(); + } catch (Throwable t) { + log.info("Error getting identifier", t); + responseObserver.onError(t); + } + } + + @Override + public void inception(Ident request, StreamObserver responseObserver) { + try { + var i = Identifier.from(request); + if (i instanceof SelfAddressingIdentifier sai) { + var incep = thoth.inception(sai, inception); + if (incep instanceof InceptionEventImpl incp) { + responseObserver.onNext(incp.toInceptionEvent_()); + responseObserver.onCompleted(); + } else { + log.info("Not an inception event impl: {}", incep); + responseObserver.onError(new IllegalArgumentException("Not an inception event: " + incep)); + } + } else { + log.info("Not a SelfAddressingIdentifier: {}", i); + responseObserver.onError(new IllegalArgumentException("Not a SelfAddressingIdentifier: " + i)); + } + } catch (Throwable t) { + log.info("Error creating inception event", t); + responseObserver.onError(t); + } + } + + @Override + public void rotate(Empty request, StreamObserver responseObserver) { + try { + var rot = thoth.rotate(rotation); + if (rot instanceof RotationEventImpl incp) { + responseObserver.onNext(incp.toRotationEvent_()); + responseObserver.onCompleted(); + } else { + log.info("Not a rotation event impl: {}", rot); + responseObserver.onError(new IllegalArgumentException("Not a rotation event: " + rot)); + } + } catch (Throwable t) { + log.info("Error rotating identifier", t); + responseObserver.onError(t); + } + } +} diff --git a/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/delegation/Delegation.java b/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/delegation/Delegation.java index 32305fbf16..d072d419f3 100644 --- 
a/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/delegation/Delegation.java +++ b/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/delegation/Delegation.java @@ -6,17 +6,16 @@ */ package com.salesforce.apollo.thoth.grpc.delegation; -import java.util.concurrent.CompletableFuture; - import com.salesforce.apollo.cryptography.SigningThreshold; import com.salesforce.apollo.stereotomy.event.DelegatedInceptionEvent; import com.salesforce.apollo.stereotomy.event.DelegatedRotationEvent; import com.salesforce.apollo.stereotomy.identifier.SelfAddressingIdentifier; import com.salesforce.apollo.stereotomy.identifier.spec.RotationSpecification; +import java.util.concurrent.CompletableFuture; + /** * @author hal.hildebrand - * */ public interface Delegation { DelegatedInceptionEvent inception(SelfAddressingIdentifier controller, SigningThreshold signingThreshold, diff --git a/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/delegation/DelegationClient.java b/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/delegation/DelegationClient.java index b28919aebd..c99cfe1944 100644 --- a/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/delegation/DelegationClient.java +++ b/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/delegation/DelegationClient.java @@ -8,7 +8,6 @@ /** * @author hal.hildebrand - * */ public class DelegationClient { diff --git a/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/delegation/DelegationServer.java b/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/delegation/DelegationServer.java index df661c60d3..1da8819193 100644 --- a/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/delegation/DelegationServer.java +++ b/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/delegation/DelegationServer.java @@ -6,11 +6,10 @@ */ package com.salesforce.apollo.thoth.grpc.delegation; -import com.salesfoce.apollo.thoth.proto.DelegatedGrpc.DelegatedImplBase; +import com.salesforce.apollo.thoth.proto.DelegatedGrpc.DelegatedImplBase; /** * @author hal.hildebrand - * */ public class DelegationServer extends DelegatedImplBase { diff --git a/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/delegation/DelegationService.java b/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/delegation/DelegationService.java index 2563f94579..d90cfe69a6 100644 --- a/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/delegation/DelegationService.java +++ b/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/delegation/DelegationService.java @@ -10,7 +10,6 @@ /** * @author hal.hildebrand - * */ public interface DelegationService extends Link { diff --git a/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/dht/DhtClient.java b/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/dht/DhtClient.java index f6b1d4c76c..44b491f964 100644 --- a/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/dht/DhtClient.java +++ b/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/dht/DhtClient.java @@ -8,9 +8,9 @@ import com.codahale.metrics.Timer.Context; import com.google.protobuf.Empty; -import com.salesfoce.apollo.stereotomy.event.proto.*; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.*; -import com.salesfoce.apollo.thoth.proto.KerlDhtGrpc; +import com.salesforce.apollo.stereotomy.event.proto.*; +import com.salesforce.apollo.stereotomy.services.grpc.proto.*; +import com.salesforce.apollo.thoth.proto.KerlDhtGrpc; import com.salesforce.apollo.archipelago.ManagedServerChannel; import com.salesforce.apollo.archipelago.ServerConnectionCache.CreateClientCommunications; import 
com.salesforce.apollo.membership.Member; @@ -98,6 +98,11 @@ public KeyState_ getKeyState(Ident identifier) { return service.getKeyState(identifier); } + @Override + public KeyState_ getKeyState(IdentAndSeq identAndSeq) { + return null; + } + @Override public KeyStateWithAttachments_ getKeyStateWithAttachments(EventCoords coordinates) { return service.getKeyStateWithAttachments(coordinates); @@ -118,11 +123,6 @@ public Member getMember() { public Validations getValidations(EventCoords coordinates) { return service.getValidations(coordinates); } - - @Override - public KeyState_ getKeyState(IdentAndSeq identAndSeq) { - return null; - } }; } @@ -312,6 +312,27 @@ public KeyState_ getKeyState(Ident identifier) { return result; } + @Override + public KeyState_ getKeyState(IdentAndSeq identAndSeq) { + Context timer = metrics == null ? null : metrics.getKeyStateClient().time(); + if (metrics != null) { + final var bs = identAndSeq.getSerializedSize(); + metrics.outboundBandwidth().mark(bs); + metrics.outboundGetKeyStateRequest().mark(bs); + } + var result = client.getKeyStateSeqNum(identAndSeq); + if (timer != null) { + timer.stop(); + } + if (timer != null) { + final var serializedSize = result.getSerializedSize(); + timer.stop(); + metrics.inboundBandwidth().mark(serializedSize); + metrics.inboundGetKeyStateCoordsResponse().mark(serializedSize); + } + return result; + } + @Override public KeyStateWithAttachments_ getKeyStateWithAttachments(EventCoords coordinates) { Context timer = metrics == null ? null : metrics.getAttachmentClient().time(); @@ -377,25 +398,4 @@ public Validations getValidations(EventCoords coordinates) { } return complete; } - - @Override - public KeyState_ getKeyState(IdentAndSeq identAndSeq) { - Context timer = metrics == null ? null : metrics.getKeyStateClient().time(); - if (metrics != null) { - final var bs = identAndSeq.getSerializedSize(); - metrics.outboundBandwidth().mark(bs); - metrics.outboundGetKeyStateRequest().mark(bs); - } - var result = client.getKeyStateSeqNum(identAndSeq); - if (timer != null) { - timer.stop(); - } - if (timer != null) { - final var serializedSize = result.getSerializedSize(); - timer.stop(); - metrics.inboundBandwidth().mark(serializedSize); - metrics.inboundGetKeyStateCoordsResponse().mark(serializedSize); - } - return result; - } } diff --git a/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/dht/DhtServer.java b/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/dht/DhtServer.java index c429d66cfe..d6b654abbc 100644 --- a/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/dht/DhtServer.java +++ b/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/dht/DhtServer.java @@ -8,9 +8,9 @@ import com.codahale.metrics.Timer.Context; import com.google.protobuf.Empty; -import com.salesfoce.apollo.stereotomy.event.proto.*; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.*; -import com.salesfoce.apollo.thoth.proto.KerlDhtGrpc.KerlDhtImplBase; +import com.salesforce.apollo.stereotomy.event.proto.*; +import com.salesforce.apollo.stereotomy.services.grpc.proto.*; +import com.salesforce.apollo.thoth.proto.KerlDhtGrpc.KerlDhtImplBase; import com.salesforce.apollo.archipelago.RoutableService; import com.salesforce.apollo.stereotomy.services.grpc.StereotomyMetrics; import com.salesforce.apollo.stereotomy.services.proto.ProtoKERLService; @@ -23,7 +23,7 @@ */ public class DhtServer extends KerlDhtImplBase { - private final StereotomyMetrics metrics; + private final StereotomyMetrics metrics; private final RoutableService routing; public 
DhtServer(RoutableService router, StereotomyMetrics metrics) { diff --git a/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/dht/DhtService.java b/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/dht/DhtService.java index 6aef9d6bc4..ab7c9dff99 100644 --- a/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/dht/DhtService.java +++ b/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/dht/DhtService.java @@ -7,8 +7,8 @@ package com.salesforce.apollo.thoth.grpc.dht; import com.google.protobuf.Empty; -import com.salesfoce.apollo.stereotomy.event.proto.*; -import com.salesfoce.apollo.stereotomy.services.grpc.proto.KeyStates; +import com.salesforce.apollo.stereotomy.event.proto.*; +import com.salesforce.apollo.stereotomy.services.grpc.proto.KeyStates; import com.salesforce.apollo.archipelago.Link; import java.util.List; @@ -41,11 +41,11 @@ public interface DhtService extends Link { KeyState_ getKeyState(Ident identifier); + KeyState_ getKeyState(IdentAndSeq identAndSeq); + KeyStateWithAttachments_ getKeyStateWithAttachments(EventCoords coordinates); KeyStateWithEndorsementsAndValidations_ getKeyStateWithEndorsementsAndValidations(EventCoords coordinates); Validations getValidations(EventCoords coordinates); - - KeyState_ getKeyState(IdentAndSeq identAndSeq); } diff --git a/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/reconciliation/Reconciliation.java b/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/reconciliation/Reconciliation.java index 728e0f19a3..100988f7f7 100644 --- a/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/reconciliation/Reconciliation.java +++ b/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/reconciliation/Reconciliation.java @@ -7,14 +7,13 @@ package com.salesforce.apollo.thoth.grpc.reconciliation; -import com.salesfoce.apollo.thoth.proto.Intervals; -import com.salesfoce.apollo.thoth.proto.Update; -import com.salesfoce.apollo.thoth.proto.Updating; +import com.salesforce.apollo.thoth.proto.Intervals; +import com.salesforce.apollo.thoth.proto.Update; +import com.salesforce.apollo.thoth.proto.Updating; import com.salesforce.apollo.cryptography.Digest; /** * @author hal.hildebrand - * */ public interface Reconciliation { Update reconcile(Intervals intervals, Digest member); diff --git a/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/reconciliation/ReconciliationClient.java b/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/reconciliation/ReconciliationClient.java index 2d84b4c315..20b69b38f4 100644 --- a/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/reconciliation/ReconciliationClient.java +++ b/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/reconciliation/ReconciliationClient.java @@ -7,14 +7,12 @@ package com.salesforce.apollo.thoth.grpc.reconciliation; -import java.io.IOException; - import com.google.protobuf.Empty; -import com.salesfoce.apollo.thoth.proto.Intervals; -import com.salesfoce.apollo.thoth.proto.ReconciliationGrpc; -import com.salesfoce.apollo.thoth.proto.Update; -import com.salesfoce.apollo.thoth.proto.Updating; -import com.salesfoce.apollo.cryptography.proto.Digeste; +import com.salesforce.apollo.cryptography.proto.Digeste; +import com.salesforce.apollo.thoth.proto.Intervals; +import com.salesforce.apollo.thoth.proto.ReconciliationGrpc; +import com.salesforce.apollo.thoth.proto.Update; +import com.salesforce.apollo.thoth.proto.Updating; import com.salesforce.apollo.archipelago.ManagedServerChannel; import com.salesforce.apollo.archipelago.ServerConnectionCache.CreateClientCommunications; import 
com.salesforce.apollo.cryptography.Digest; @@ -22,11 +20,26 @@ import com.salesforce.apollo.membership.SigningMember; import com.salesforce.apollo.stereotomy.services.grpc.StereotomyMetrics; +import java.io.IOException; + /** * @author hal.hildebrand - * */ public class ReconciliationClient implements ReconciliationService { + private final ManagedServerChannel channel; + private final ReconciliationGrpc.ReconciliationBlockingStub client; + @SuppressWarnings("unused") + private final Digeste context; + @SuppressWarnings("unused") + private final StereotomyMetrics metrics; + + public ReconciliationClient(Digest context, ManagedServerChannel channel, StereotomyMetrics metrics) { + this.context = context.toDigeste(); + this.channel = channel; + this.client = ReconciliationGrpc.newBlockingStub(channel).withCompression("gzip"); + this.metrics = metrics; + } + public static CreateClientCommunications getCreate(Digest context, StereotomyMetrics metrics) { return (c) -> { @@ -47,31 +60,17 @@ public Member getMember() { } @Override - public Update reconcile(Intervals intervals) { + public Update reconcile(Intervals intervals) { return Update.getDefaultInstance(); } @Override - public Empty update(Updating update) { + public Empty update(Updating update) { return Empty.getDefaultInstance(); } }; } - private final ManagedServerChannel channel; - private final ReconciliationGrpc.ReconciliationBlockingStub client; - @SuppressWarnings("unused") - private final Digeste context; - @SuppressWarnings("unused") - private final StereotomyMetrics metrics; - - public ReconciliationClient(Digest context, ManagedServerChannel channel, StereotomyMetrics metrics) { - this.context = context.toDigeste(); - this.channel = channel; - this.client = ReconciliationGrpc.newBlockingStub(channel).withCompression("gzip"); - this.metrics = metrics; - } - @Override public void close() throws IOException { channel.release(); @@ -83,12 +82,12 @@ public Member getMember() { } @Override - public Update reconcile(Intervals intervals) { + public Update reconcile(Intervals intervals) { return client.reconcile(intervals); } @Override - public Empty update(Updating update) { + public Empty update(Updating update) { return client.update(update); } } diff --git a/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/reconciliation/ReconciliationServer.java b/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/reconciliation/ReconciliationServer.java index 47f2337bac..340a4dc9c0 100644 --- a/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/reconciliation/ReconciliationServer.java +++ b/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/reconciliation/ReconciliationServer.java @@ -8,20 +8,18 @@ package com.salesforce.apollo.thoth.grpc.reconciliation; import com.google.protobuf.Empty; -import com.salesfoce.apollo.thoth.proto.Intervals; -import com.salesfoce.apollo.thoth.proto.ReconciliationGrpc.ReconciliationImplBase; -import com.salesfoce.apollo.thoth.proto.Update; -import com.salesfoce.apollo.thoth.proto.Updating; +import com.salesforce.apollo.thoth.proto.Intervals; +import com.salesforce.apollo.thoth.proto.ReconciliationGrpc.ReconciliationImplBase; +import com.salesforce.apollo.thoth.proto.Update; +import com.salesforce.apollo.thoth.proto.Updating; import com.salesforce.apollo.archipelago.RoutableService; import com.salesforce.apollo.cryptography.Digest; import com.salesforce.apollo.protocols.ClientIdentity; import com.salesforce.apollo.stereotomy.services.grpc.StereotomyMetrics; - import io.grpc.stub.StreamObserver; /** * @author 
hal.hildebrand - * */ public class ReconciliationServer extends ReconciliationImplBase { private final ClientIdentity identity; diff --git a/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/reconciliation/ReconciliationService.java b/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/reconciliation/ReconciliationService.java index 20f6953ed0..ac981e2c57 100644 --- a/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/reconciliation/ReconciliationService.java +++ b/thoth/src/main/java/com/salesforce/apollo/thoth/grpc/reconciliation/ReconciliationService.java @@ -7,20 +7,18 @@ package com.salesforce.apollo.thoth.grpc.reconciliation; -import com.google.common.util.concurrent.ListenableFuture; import com.google.protobuf.Empty; -import com.salesfoce.apollo.thoth.proto.Intervals; -import com.salesfoce.apollo.thoth.proto.Update; -import com.salesfoce.apollo.thoth.proto.Updating; +import com.salesforce.apollo.thoth.proto.Intervals; +import com.salesforce.apollo.thoth.proto.Update; +import com.salesforce.apollo.thoth.proto.Updating; import com.salesforce.apollo.archipelago.Link; /** * @author hal.hildebrand - * */ public interface ReconciliationService extends Link { - Update reconcile(Intervals intervals); + Update reconcile(Intervals intervals); Empty update(Updating update); diff --git a/thoth/src/main/java/com/salesforce/apollo/thoth/metrics/GorgoneionMetrics.java b/thoth/src/main/java/com/salesforce/apollo/thoth/metrics/GorgoneionMetrics.java index 019a3ffa8c..ae8fb1d4b5 100644 --- a/thoth/src/main/java/com/salesforce/apollo/thoth/metrics/GorgoneionMetrics.java +++ b/thoth/src/main/java/com/salesforce/apollo/thoth/metrics/GorgoneionMetrics.java @@ -8,7 +8,6 @@ /** * @author hal.hildebrand - * */ public interface GorgoneionMetrics { diff --git a/thoth/src/main/java/com/salesforce/apollo/thoth/metrics/KerlDhtMetrics.java b/thoth/src/main/java/com/salesforce/apollo/thoth/metrics/KerlDhtMetrics.java index 24305f183b..9362a19978 100644 --- a/thoth/src/main/java/com/salesforce/apollo/thoth/metrics/KerlDhtMetrics.java +++ b/thoth/src/main/java/com/salesforce/apollo/thoth/metrics/KerlDhtMetrics.java @@ -8,7 +8,6 @@ /** * @author hal.hildebrand - * */ public interface KerlDhtMetrics { diff --git a/thoth/src/test/java/com/salesforce/apollo/thoth/AbstractDhtTest.java b/thoth/src/test/java/com/salesforce/apollo/thoth/AbstractDhtTest.java index ad83f13169..2fa5dcca66 100644 --- a/thoth/src/test/java/com/salesforce/apollo/thoth/AbstractDhtTest.java +++ b/thoth/src/test/java/com/salesforce/apollo/thoth/AbstractDhtTest.java @@ -41,7 +41,6 @@ import java.time.Duration; import java.util.*; import java.util.concurrent.ConcurrentSkipListMap; -import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.BiFunction; import java.util.stream.Collectors; @@ -53,19 +52,19 @@ * @author hal.hildebrand */ public class AbstractDhtTest { - protected static final ProtobufEventFactory factory = new ProtobufEventFactory(); - protected static final boolean LARGE_TESTS = Boolean.getBoolean( - "large_tests"); - protected static final double PBYZ = 0.25; - protected final Map dhts = new HashMap<>(); - protected final Map routers = new HashMap<>(); - protected final AtomicBoolean gate = new AtomicBoolean( + protected static final ProtobufEventFactory factory = new ProtobufEventFactory(); + protected static final boolean LARGE_TESTS = Boolean.getBoolean("large_tests"); + protected static final double PBYZ = 0.25; + + protected final TreeMap dhts = new TreeMap<>(); + 
protected final Map routers = new HashMap<>(); + protected final AtomicBoolean gate = new AtomicBoolean( false); - protected Context context; - protected Map> identities; - protected MemKERL kerl; - protected String prefix; - protected Stereotomy stereotomy; + protected Context context; + protected Map> identities; + protected MemKERL kerl; + protected String prefix; + protected Stereotomy stereotomy; public AbstractDhtTest() { super(); @@ -131,17 +130,16 @@ public void before() throws Exception { } protected int getCardinality() { - return LARGE_TESTS ? 100 : 5; + return LARGE_TESTS ? 10 : 5; } protected void instantiate(SigningMember member, Context context, ConcurrentSkipListMap serverMembers) { context.activate(member); - final var url = String.format("jdbc:h2:mem:%s-%s;DB_CLOSE_DELAY=-1", member.getId(), prefix); + final var url = String.format("jdbc:h2:mem:%s-%s;DB_CLOSE_ON_EXIT=FALSE", member.getId(), prefix); context.activate(member); JdbcConnectionPool connectionPool = JdbcConnectionPool.create(url, "", ""); connectionPool.setMaxConnections(10); - var exec = Executors.newVirtualThreadPerTaskExecutor(); var router = new LocalServer(prefix, member).router(ServerConnectionCache.newBuilder().setTarget(2)); routers.put(member, router); dhts.put(member, @@ -149,7 +147,7 @@ protected void instantiate(SigningMember member, Context context, router, Duration.ofSeconds(10), 0.0125, null)); } - protected BiFunction wrap() { + protected BiFunction wrap() { return (t, k) -> k; } } diff --git a/thoth/src/test/java/com/salesforce/apollo/thoth/AniTest.java b/thoth/src/test/java/com/salesforce/apollo/thoth/AniTest.java index 2834e31b6e..3634709586 100644 --- a/thoth/src/test/java/com/salesforce/apollo/thoth/AniTest.java +++ b/thoth/src/test/java/com/salesforce/apollo/thoth/AniTest.java @@ -6,23 +6,20 @@ */ package com.salesforce.apollo.thoth; -import static org.junit.jupiter.api.Assertions.assertTrue; +import com.salesforce.apollo.membership.SigningMember; +import com.salesforce.apollo.stereotomy.identifier.spec.IdentifierSpecification; +import org.junit.jupiter.api.Test; import java.security.SecureRandom; import java.time.Duration; import java.util.Collections; import java.util.Map; -import java.util.concurrent.Executors; import java.util.stream.Collectors; -import org.junit.jupiter.api.Test; - -import com.salesforce.apollo.membership.SigningMember; -import com.salesforce.apollo.stereotomy.identifier.spec.IdentifierSpecification; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * @author hal.hildebrand - * */ public class AniTest extends AbstractDhtTest { @@ -32,17 +29,15 @@ public void smokin() throws Exception { entropy.setSeed(new byte[] { 7, 7, 7 }); routers.values().forEach(lr -> lr.start()); - dhts.values() - .forEach(e -> e.start( - Duration.ofSeconds(1))); + dhts.values().forEach(e -> e.start(Duration.ofSeconds(1))); - var dht = dhts.values().stream().findFirst().get(); + var dht = dhts.firstEntry().getValue(); Map anis = dhts.entrySet() .stream() - .collect(Collectors.toMap(e -> e.getKey(), - e -> new Ani(e.getKey().getId(), - dhts.get(e.getKey()).asKERL()))); + .collect(Collectors.toMap(e -> e.getKey(), e -> new Ani(e.getKey().getId(), + dhts.get(e.getKey()) + .asKERL()))); var ani = anis.values().stream().findFirst().get(); // inception @@ -51,7 +46,7 @@ public void smokin() throws Exception { var nextKeyPair = specification.getSignatureAlgorithm().generateKeyPair(entropy); var inception = inception(specification, initialKeyPair, factory, nextKeyPair); - 
dht.append(Collections.singletonList(inception.toKeyEvent_())) ; + dht.append(Collections.singletonList(inception.toKeyEvent_())); assertTrue(ani.eventValidation(Duration.ofSeconds(10)).validate(inception)); } diff --git a/thoth/src/test/java/com/salesforce/apollo/thoth/BootstrappingTest.java b/thoth/src/test/java/com/salesforce/apollo/thoth/BootstrappingTest.java index 5700dd1224..906ee0f2c7 100644 --- a/thoth/src/test/java/com/salesforce/apollo/thoth/BootstrappingTest.java +++ b/thoth/src/test/java/com/salesforce/apollo/thoth/BootstrappingTest.java @@ -7,9 +7,8 @@ package com.salesforce.apollo.thoth; import com.google.protobuf.Any; -import com.salesfoce.apollo.gorgoneion.proto.SignedNonce; -import com.salesfoce.apollo.stereotomy.event.proto.Validations; import com.salesforce.apollo.archipelago.LocalServer; +import com.salesforce.apollo.archipelago.Router; import com.salesforce.apollo.archipelago.ServerConnectionCache; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.gorgoneion.Gorgoneion; @@ -19,14 +18,17 @@ import com.salesforce.apollo.gorgoneion.client.client.comm.AdmissionsClient; import com.salesforce.apollo.gorgoneion.comm.admissions.AdmissionsServer; import com.salesforce.apollo.gorgoneion.comm.admissions.AdmissionsService; +import com.salesforce.apollo.gorgoneion.proto.SignedNonce; import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; import com.salesforce.apollo.stereotomy.KERL; import com.salesforce.apollo.stereotomy.KeyState; import com.salesforce.apollo.stereotomy.StereotomyImpl; +import com.salesforce.apollo.stereotomy.event.proto.Validations; import com.salesforce.apollo.stereotomy.mem.MemKERL; import com.salesforce.apollo.stereotomy.mem.MemKeyStore; import com.salesforce.apollo.stereotomy.services.proto.ProtoKERLAdapter; import com.salesforce.apollo.utils.Utils; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Test; import java.security.SecureRandom; @@ -44,6 +46,15 @@ */ public class BootstrappingTest extends AbstractDhtTest { + private Router clientRouter; + + @AfterEach + public void closeClient() throws Exception { + if (clientRouter != null) { + clientRouter.close(Duration.ofSeconds(3)); + } + } + @Test public void smokin() throws Exception { routers.values().forEach(r -> r.start()); @@ -61,7 +72,7 @@ context, new DirectPublisher(new ProtoKERLAdapter(k)), r, }).toList(); final var dht = (KerlDHT) dhts.values().stream().findFirst().get(); - final KERL testKerl = dht.asKERL(); + final var testKerl = dht.asKERL(); var entropy = SecureRandom.getInstance("SHA1PRNG"); entropy.setSeed(new byte[] { 7, 7, 7 }); var clientKerl = new MemKERL(DigestAlgorithm.DEFAULT); @@ -71,9 +82,9 @@ context, new DirectPublisher(new ProtoKERLAdapter(k)), r, var client = new ControlledIdentifierMember(clientStereotomy.newIdentifier()); // Registering client comms - var clientRouter = new LocalServer(prefix, client).router(ServerConnectionCache.newBuilder().setTarget(2)); + clientRouter = new LocalServer(prefix, client).router(ServerConnectionCache.newBuilder().setTarget(2)); AdmissionsService admissions = mock(AdmissionsService.class); - var clientComminications = clientRouter.create(client, context.getId(), admissions, ":admissions-client", + var clientCommunications = clientRouter.create(client, context.getId(), admissions, ":admissions-client", r -> new AdmissionsServer( clientRouter.getClientIdentityProvider(), r, null), AdmissionsClient.getCreate(null), @@ -81,7 +92,7 @@ context, new DirectPublisher(new 
ProtoKERLAdapter(k)), r, clientRouter.start(); // Admin client link - var admin = clientComminications.connect(dhts.keySet().stream().findFirst().get()); + var admin = clientCommunications.connect(dhts.keySet().stream().findFirst().get()); assertNotNull(admin); Function attester = sn -> { @@ -111,7 +122,7 @@ context, new DirectPublisher(new ProtoKERLAdapter(k)), r, } @Override - protected BiFunction wrap() { + protected BiFunction wrap() { // This allows us to have the core member keys trusted for this test, as we're testing the bootstrapping of the client, not the entire system return (t, k) -> gate.get() ? new Maat(context, k, k) : k; } diff --git a/thoth/src/test/java/com/salesforce/apollo/thoth/DhtRebalanceTest.java b/thoth/src/test/java/com/salesforce/apollo/thoth/DhtRebalanceTest.java index ea656a0123..a80a9b77b7 100644 --- a/thoth/src/test/java/com/salesforce/apollo/thoth/DhtRebalanceTest.java +++ b/thoth/src/test/java/com/salesforce/apollo/thoth/DhtRebalanceTest.java @@ -6,68 +6,140 @@ */ package com.salesforce.apollo.thoth; +import com.salesforce.apollo.archipelago.LocalServer; +import com.salesforce.apollo.archipelago.Router; +import com.salesforce.apollo.archipelago.ServerConnectionCache; import com.salesforce.apollo.cryptography.DigestAlgorithm; -import com.salesforce.apollo.stereotomy.EventCoordinates; -import com.salesforce.apollo.stereotomy.KERL; -import com.salesforce.apollo.stereotomy.Stereotomy; -import com.salesforce.apollo.stereotomy.StereotomyImpl; +import com.salesforce.apollo.membership.Context; +import com.salesforce.apollo.membership.Member; +import com.salesforce.apollo.membership.SigningMember; +import com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; +import com.salesforce.apollo.stereotomy.*; import com.salesforce.apollo.stereotomy.event.KeyEvent; -import com.salesforce.apollo.stereotomy.event.Seal.CoordinatesSeal; +import com.salesforce.apollo.stereotomy.event.Seal; import com.salesforce.apollo.stereotomy.event.Seal.DigestSeal; +import com.salesforce.apollo.stereotomy.identifier.SelfAddressingIdentifier; import com.salesforce.apollo.stereotomy.identifier.spec.InteractionSpecification; import com.salesforce.apollo.stereotomy.identifier.spec.RotationSpecification; +import com.salesforce.apollo.stereotomy.mem.MemKERL; import com.salesforce.apollo.stereotomy.mem.MemKeyStore; +import com.salesforce.apollo.utils.Utils; +import org.h2.jdbcx.JdbcConnectionPool; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; import java.security.SecureRandom; import java.time.Duration; -import java.util.List; +import java.util.*; +import java.util.stream.Collectors; +import java.util.stream.IntStream; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * @author hal.hildebrand */ -public class DhtRebalanceTest extends AbstractDhtTest { - private SecureRandom secureRandom; +public class DhtRebalanceTest { + public static final int CARDINALITY = 23; + private final TreeMap routers = new TreeMap<>(); + private final TreeMap dhts = new TreeMap<>(); + private final TreeMap> contexts = new TreeMap<>(); + private String prefix; + private SecureRandom entropy; + private StereotomyImpl stereotomy; + private MemKERL kerl; + private Map> identities; + + @AfterEach + public void afterIt() throws Exception { + routers.values().forEach(r -> r.close(Duration.ofSeconds(1))); + routers.clear(); + dhts.clear(); + contexts.clear(); + if 
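The overridden `wrap()` above flips verification on through the shared gate: while the gate is closed, the core members' KERLs pass through untouched; once it opens, new views are wrapped in the validating delegate (Maat in the test). A self-contained sketch of that gating pattern, with a generic decorator standing in for Maat:

```java
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.BiFunction;
import java.util.function.UnaryOperator;

final class GatedWrapping<T, K> {

    private final AtomicBoolean    gate = new AtomicBoolean(false);
    private final UnaryOperator<K> validatingDecorator; // e.g. k -> new Maat(context, k, k)

    GatedWrapping(UnaryOperator<K> validatingDecorator) {
        this.validatingDecorator = validatingDecorator;
    }

    // Pass-through while the core members bootstrap, validating afterwards
    BiFunction<T, K, K> wrap() {
        return (t, k) -> gate.get() ? validatingDecorator.apply(k) : k;
    }

    void openGate() {
        gate.set(true); // from here on, wrapped KERLs are validated
    }
}
```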
(identities != null) { + identities.clear(); + } + } @BeforeEach public void beforeIt() throws Exception { - secureRandom = SecureRandom.getInstance("SHA1PRNG"); - secureRandom.setSeed(new byte[] { 0 }); + entropy = SecureRandom.getInstance("SHA1PRNG"); + entropy.setSeed(new byte[] { 6, 6, 6 }); + prefix = UUID.randomUUID().toString(); + kerl = new MemKERL(DigestAlgorithm.DEFAULT); + stereotomy = new StereotomyImpl(new MemKeyStore(), kerl, entropy); + identities = IntStream.range(0, CARDINALITY) + .mapToObj(i -> stereotomy.newIdentifier()) + .collect(Collectors.toMap(controlled -> new ControlledIdentifierMember(controlled), + controlled -> controlled)); + identities.keySet().forEach(member -> instantiate(member)); } - @Test + // @Test public void lifecycle() throws Exception { routers.values().forEach(r -> r.start()); - dhts.values().forEach(dht -> dht.start(Duration.ofSeconds(1))); + var members = new TreeSet(); + var order = dhts.navigableKeySet().stream().toList(); + System.out.println("Order: " + order); + members.add(order.getFirst()); + var fristKerl = dhts.get(order.getFirst()).asKERL(); + dhts.get(order.getFirst()).start(Duration.ofMillis(10)); + + Stereotomy controller = new StereotomyImpl(new MemKeyStore(), fristKerl, entropy); - KERL kerl = dhts.values().stream().findFirst().get().asKERL(); + var identifier = controller.newIdentifier(); + List identifierKerl = fristKerl.kerl(identifier.getIdentifier()); + assertEquals(1, identifierKerl.size()); + assertEquals(KeyEvent.INCEPTION_TYPE, identifierKerl.get(0).event().getIlk()); - Stereotomy controller = new StereotomyImpl(new MemKeyStore(), kerl, secureRandom); + var remaining = order.subList(1, order.size()); + members.add(remaining.getFirst()); + var test = dhts.get(remaining.getFirst()); + test.start(Duration.ofMillis(10)); + var testKerl = test.asKERL(); + members.forEach(m -> { + contexts.values().forEach(c -> c.activate(m)); + }); - var i = controller.newIdentifier(); + assertTrue(Utils.waitForCondition(20_000, 1000, () -> testKerl.kerl(identifier.getIdentifier()).size() == 1)); + var mKerl = testKerl.kerl(identifier.getIdentifier()); + assertEquals(1, mKerl.size()); + assertEquals(KeyEvent.INCEPTION_TYPE, mKerl.get(0).event().getIlk()); var digest = DigestAlgorithm.BLAKE3_256.digest("digest seal".getBytes()); - var event = EventCoordinates.of(kerl.getKeyEvent(i.getLastEstablishmentEvent())); - var seals = List.of(DigestSeal.construct(digest), DigestSeal.construct(digest), - CoordinatesSeal.construct(event)); + var event = EventCoordinates.of(testKerl.getKeyEvent(identifier.getLastEstablishmentEvent())); + var seals = List.of(DigestSeal.construct(digest), DigestSeal.construct(digest), Seal.construct(event)); + + identifier.rotate(); + identifier.seal(InteractionSpecification.newBuilder()); + identifier.rotate(RotationSpecification.newBuilder().addAllSeals(seals)); + identifier.seal(InteractionSpecification.newBuilder().addAllSeals(seals)); + identifier.rotate(); + identifier.rotate(); + + identifierKerl = testKerl.kerl(identifier.getIdentifier()); + assertEquals(7, identifierKerl.size()); + assertEquals(KeyEvent.INCEPTION_TYPE, identifierKerl.get(0).event().getIlk()); + assertEquals(KeyEvent.ROTATION_TYPE, identifierKerl.get(1).event().getIlk()); + assertEquals(KeyEvent.INTERACTION_TYPE, identifierKerl.get(2).event().getIlk()); + assertEquals(KeyEvent.ROTATION_TYPE, identifierKerl.get(3).event().getIlk()); + assertEquals(KeyEvent.INTERACTION_TYPE, identifierKerl.get(4).event().getIlk()); + assertEquals(KeyEvent.ROTATION_TYPE, 
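The join check above relies on a bounded poll rather than a fixed sleep: wait up to 20 s, probing every second, for the second member's KERL to contain the identifier's inception. A stand-alone equivalent of that helper, assumed to mirror the shape of the `Utils.waitForCondition(timeoutMs, periodMs, predicate)` call in the hunk:

```java
import java.util.function.BooleanSupplier;

final class Wait {

    // Poll the condition every periodMs until it holds or timeoutMs elapses.
    static boolean forCondition(long timeoutMs, long periodMs, BooleanSupplier condition)
    throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMs;
        while (System.currentTimeMillis() < deadline) {
            if (condition.getAsBoolean()) {
                return true;
            }
            Thread.sleep(periodMs);
        }
        return condition.getAsBoolean(); // one last check at the deadline
    }
}
```

Used as `assertTrue(Wait.forCondition(20_000, 1_000, () -> testKerl.kerl(identifier.getIdentifier()).size() == 1))`, it reads just like the assertion in the test.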
identifierKerl.get(5).event().getIlk()); + assertEquals(KeyEvent.ROTATION_TYPE, identifierKerl.get(6).event().getIlk()); + } - i.rotate(); - i.seal(InteractionSpecification.newBuilder()); - i.rotate(RotationSpecification.newBuilder().addAllSeals(seals)); - i.seal(InteractionSpecification.newBuilder().addAllSeals(seals)); - i.rotate(); - i.rotate(); - var iKerl = kerl.kerl(i.getIdentifier()); - assertEquals(7, iKerl.size()); - assertEquals(KeyEvent.INCEPTION_TYPE, iKerl.get(0).event().getIlk()); - assertEquals(KeyEvent.ROTATION_TYPE, iKerl.get(1).event().getIlk()); - assertEquals(KeyEvent.INTERACTION_TYPE, iKerl.get(2).event().getIlk()); - assertEquals(KeyEvent.ROTATION_TYPE, iKerl.get(3).event().getIlk()); - assertEquals(KeyEvent.INTERACTION_TYPE, iKerl.get(4).event().getIlk()); - assertEquals(KeyEvent.ROTATION_TYPE, iKerl.get(5).event().getIlk()); - assertEquals(KeyEvent.ROTATION_TYPE, iKerl.get(6).event().getIlk()); + protected void instantiate(SigningMember member) { + var context = Context.newBuilder().build(); + contexts.put(member, context); + context.activate(member); + final var url = String.format("jdbc:h2:mem:%s-%s;DB_CLOSE_ON_EXIT=FALSE", member.getId(), prefix); + context.activate(member); + JdbcConnectionPool connectionPool = JdbcConnectionPool.create(url, "", ""); + connectionPool.setMaxConnections(10); + var router = new LocalServer(prefix, member).router(ServerConnectionCache.newBuilder().setTarget(2)); + routers.put(member, router); + dhts.put(member, new KerlDHT(Duration.ofMillis(3), context, member, (t, k) -> k, connectionPool, + DigestAlgorithm.DEFAULT, router, Duration.ofSeconds(1), 0.0125, null)); } } diff --git a/thoth/src/test/java/com/salesforce/apollo/thoth/KerlDhtTest.java b/thoth/src/test/java/com/salesforce/apollo/thoth/KerlDhtTest.java index 3cedc94f8c..5c0e137ab2 100644 --- a/thoth/src/test/java/com/salesforce/apollo/thoth/KerlDhtTest.java +++ b/thoth/src/test/java/com/salesforce/apollo/thoth/KerlDhtTest.java @@ -35,7 +35,7 @@ public void smokin() throws Exception { var nextKeyPair = specification.getSignatureAlgorithm().generateKeyPair(entropy); var inception = inception(specification, initialKeyPair, factory, nextKeyPair); - var dht = dhts.values().stream().findFirst().get(); + var dht = dhts.firstEntry().getValue(); dht.append(Collections.singletonList(inception.toKeyEvent_())); var lookup = dht.getKeyEvent(inception.getCoordinates().toEventCoords()); diff --git a/thoth/src/test/java/com/salesforce/apollo/thoth/KerlSpaceTest.java b/thoth/src/test/java/com/salesforce/apollo/thoth/KerlSpaceTest.java index 0736076cb7..d0697552e8 100644 --- a/thoth/src/test/java/com/salesforce/apollo/thoth/KerlSpaceTest.java +++ b/thoth/src/test/java/com/salesforce/apollo/thoth/KerlSpaceTest.java @@ -6,13 +6,13 @@ */ package com.salesforce.apollo.thoth; -import com.salesfoce.apollo.thoth.proto.Interval; -import com.salesfoce.apollo.thoth.proto.Intervals; +import com.salesforce.apollo.thoth.proto.Interval; +import com.salesforce.apollo.thoth.proto.Intervals; +import com.salesforce.apollo.bloomFilters.BloomFilter; import com.salesforce.apollo.cryptography.DigestAlgorithm; import com.salesforce.apollo.stereotomy.StereotomyImpl; import com.salesforce.apollo.stereotomy.db.UniKERLDirectPooled; import com.salesforce.apollo.stereotomy.mem.MemKeyStore; -import com.salesforce.apollo.bloomFilters.BloomFilter; import liquibase.Liquibase; import liquibase.database.core.H2Database; import liquibase.exception.LiquibaseException; @@ -22,6 +22,7 @@ import org.junit.jupiter.api.Test; import 
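The seven assertions above pin down the ordering the rotate/seal sequence should produce: inception, rotation, interaction, rotation, interaction, rotation, rotation. A table-driven restatement of that expectation, assuming the usual KERI ilk codes (icp/rot/ixn) behind the `INCEPTION_TYPE`/`ROTATION_TYPE`/`INTERACTION_TYPE` constants:

```java
import java.util.List;

import static org.junit.jupiter.api.Assertions.assertEquals;

final class IlkSequence {

    // The expected KERL after the lifecycle calls in the test above
    static final List<String> AFTER_LIFECYCLE = List.of("icp",  // inception
                                                        "rot",  // rotate()
                                                        "ixn",  // seal(InteractionSpecification)
                                                        "rot",  // rotate(.. addAllSeals(seals))
                                                        "ixn",  // seal(.. addAllSeals(seals))
                                                        "rot",  // rotate()
                                                        "rot"); // rotate()

    static void assertMatches(List<String> actualIlks) {
        assertEquals(AFTER_LIFECYCLE, actualIlks);
    }
}
```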
java.security.SecureRandom; +import java.sql.SQLException; import static org.junit.jupiter.api.Assertions.*; @@ -30,55 +31,62 @@ */ public class KerlSpaceTest { - @Test - public void smokin() throws Exception { - final var digestAlgorithm = DigestAlgorithm.DEFAULT; - var entropy = SecureRandom.getInstance("SHA1PRNG"); - entropy.setSeed(new byte[]{6, 6, 6}); - - JdbcConnectionPool connectionPoolA = JdbcConnectionPool.create("jdbc:h2:mem:A;DB_CLOSE_DELAY=-1", "", ""); - connectionPoolA.setMaxConnections(10); - var spaceA = new KerlSpace(connectionPoolA); - var kerlPoolA = new UniKERLDirectPooled(connectionPoolA, digestAlgorithm); - final var kerlA = kerlPoolA.create(); - var stereotomyA = new StereotomyImpl(new MemKeyStore(), kerlA, entropy); + private static void initialize(JdbcConnectionPool connectionPoolA, JdbcConnectionPool connectionPoolB) + throws SQLException { var database = new H2Database(); try (var connection = connectionPoolA.getConnection()) { database.setConnection(new liquibase.database.jvm.JdbcConnection(connection)); try (Liquibase liquibase = new Liquibase("/initialize-thoth.xml", new ClassLoaderResourceAccessor(), - database)) { + database)) { liquibase.update((String) null); } catch (LiquibaseException e) { throw new IllegalStateException(e); } } - JdbcConnectionPool connectionPoolB = JdbcConnectionPool.create("jdbc:h2:mem:B;DB_CLOSE_DELAY=-1", "", ""); - connectionPoolB.setMaxConnections(10); - var spaceB = new KerlSpace(connectionPoolB); - var kerlPoolB = new UniKERLDirectPooled(connectionPoolB, digestAlgorithm); - final var kerlB = kerlPoolB.create(); - var stereotomyB = new StereotomyImpl(new MemKeyStore(), kerlB, entropy); database = new H2Database(); try (var connection = connectionPoolB.getConnection()) { database.setConnection(new liquibase.database.jvm.JdbcConnection(connection)); try (Liquibase liquibase = new Liquibase("/initialize-thoth.xml", new ClassLoaderResourceAccessor(), - database)) { + database)) { liquibase.update((String) null); } catch (LiquibaseException e) { throw new IllegalStateException(e); } } + } + + @Test + public void smokin() throws Exception { + final var digestAlgorithm = DigestAlgorithm.DEFAULT; + var entropy = SecureRandom.getInstance("SHA1PRNG"); + entropy.setSeed(new byte[] { 6, 6, 6 }); + + JdbcConnectionPool connectionPoolA = JdbcConnectionPool.create("jdbc:h2:mem:A;DB_CLOSE_DELAY=-1", "", ""); + connectionPoolA.setMaxConnections(10); + JdbcConnectionPool connectionPoolB = JdbcConnectionPool.create("jdbc:h2:mem:B;DB_CLOSE_DELAY=-1", "", ""); + connectionPoolB.setMaxConnections(10); + + var spaceA = new KerlSpace(connectionPoolA); + var stereotomyA = new StereotomyImpl(new MemKeyStore(), + new UniKERLDirectPooled(connectionPoolA, digestAlgorithm).create(), + entropy); + var spaceB = new KerlSpace(connectionPoolB); + var stereotomyB = new StereotomyImpl(new MemKeyStore(), + new UniKERLDirectPooled(connectionPoolB, digestAlgorithm).create(), + entropy); + + initialize(connectionPoolA, connectionPoolB); var identifierA = stereotomyA.newIdentifier(); try (var connection = connectionPoolA.getConnection()) { KerlDHT.updateLocationHash(identifierA.getIdentifier(), digestAlgorithm, DSL.using(connection)); } + identifierA.rotate(); var digestA = identifierA.getLastEstablishingEvent().getCoordinates().getDigest(); - var biffA = spaceA.populate(0x1638, new CombinedIntervals(new KeyInterval(digestAlgorithm.getOrigin(), - digestAlgorithm.getLast())), - 0.125); + var biffA = spaceA.populate(0x1638, new CombinedIntervals( + new 
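The extracted `initialize(...)` above runs the same Liquibase changelog against both in-memory pools before either KerlSpace touches them. Generalized to any number of pools it looks roughly like this, assuming Liquibase and H2 on the classpath and a classpath changelog such as the test's `/initialize-thoth.xml`:

```java
import liquibase.Liquibase;
import liquibase.database.core.H2Database;
import liquibase.database.jvm.JdbcConnection;
import liquibase.exception.LiquibaseException;
import liquibase.resource.ClassLoaderResourceAccessor;
import org.h2.jdbcx.JdbcConnectionPool;

import java.sql.SQLException;

final class SchemaBootstrap {

    // Apply the same changelog to every pool before the spaces start writing.
    static void initialize(String changeLog, JdbcConnectionPool... pools) throws SQLException {
        for (var pool : pools) {
            var database = new H2Database();
            try (var connection = pool.getConnection()) {
                database.setConnection(new JdbcConnection(connection));
                try (Liquibase liquibase = new Liquibase(changeLog, new ClassLoaderResourceAccessor(),
                                                         database)) {
                    liquibase.update((String) null);
                } catch (LiquibaseException e) {
                    throw new IllegalStateException(e);
                }
            }
        }
    }
}
```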
KeyInterval(digestAlgorithm.getOrigin(), digestAlgorithm.getLast())), 0.000125); assertNotNull(biffA); var bffA = BloomFilter.from(biffA); @@ -88,9 +96,8 @@ public void smokin() throws Exception { try (var connection = connectionPoolB.getConnection()) { KerlDHT.updateLocationHash(identifierB.getIdentifier(), digestAlgorithm, DSL.using(connection)); } - var biffB = spaceB.populate(0x1638, new CombinedIntervals(new KeyInterval(digestAlgorithm.getOrigin(), - digestAlgorithm.getLast())), - 0.125); + var biffB = spaceB.populate(0x1638, new CombinedIntervals( + new KeyInterval(digestAlgorithm.getOrigin(), digestAlgorithm.getLast())), 0.000125); assertNotNull(biffB); var bffB = BloomFilter.from(biffB); @@ -100,35 +107,39 @@ public void smokin() throws Exception { assertTrue(bffB.contains(digestB)); assertFalse(bffB.contains(digestA)); - assertNull(kerlA.getKeyState(identifierB.getIdentifier())); - assertNull(kerlB.getKeyState(identifierA.getIdentifier())); + assertNull( + new UniKERLDirectPooled(connectionPoolA, digestAlgorithm).create().getKeyState(identifierB.getIdentifier())); + assertNull( + new UniKERLDirectPooled(connectionPoolB, digestAlgorithm).create().getKeyState(identifierA.getIdentifier())); var updateA = spaceA.reconcile(Intervals.newBuilder() - .addIntervals(Interval.newBuilder() - .setStart(digestAlgorithm.getOrigin().toDigeste()) - .setEnd(digestAlgorithm.getLast().toDigeste()) - .build()) - .setHave(biffB) - .build(), - kerlA); + .addIntervals(Interval.newBuilder() + .setStart(digestAlgorithm.getOrigin().toDigeste()) + .setEnd(digestAlgorithm.getLast().toDigeste()) + .build()) + .setHave(biffB) + .build(), + new UniKERLDirectPooled(connectionPoolA, digestAlgorithm).create()); assertNotNull(updateA); assertEquals(2, updateA.getEventsCount()); var updateB = spaceB.reconcile(Intervals.newBuilder() - .addIntervals(Interval.newBuilder() - .setStart(digestAlgorithm.getOrigin().toDigeste()) - .setEnd(digestAlgorithm.getLast().toDigeste()) - .build()) - .setHave(biffA) - .build(), - kerlB); + .addIntervals(Interval.newBuilder() + .setStart(digestAlgorithm.getOrigin().toDigeste()) + .setEnd(digestAlgorithm.getLast().toDigeste()) + .build()) + .setHave(biffA) + .build(), + new UniKERLDirectPooled(connectionPoolB, digestAlgorithm).create()); assertNotNull(updateB); assertEquals(2, updateB.getEventsCount()); - spaceA.update(updateB.getEventsList(), kerlA); - spaceB.update(updateA.getEventsList(), kerlB); + spaceA.update(updateB.getEventsList(), new UniKERLDirectPooled(connectionPoolA, digestAlgorithm).create()); + spaceB.update(updateA.getEventsList(), new UniKERLDirectPooled(connectionPoolB, digestAlgorithm).create()); - assertNotNull(kerlA.getKeyState(identifierB.getIdentifier())); - assertNotNull(kerlB.getKeyState(identifierA.getIdentifier())); + assertNotNull( + new UniKERLDirectPooled(connectionPoolA, digestAlgorithm).create().getKeyState(identifierB.getIdentifier())); + assertNotNull( + new UniKERLDirectPooled(connectionPoolB, digestAlgorithm).create().getKeyState(identifierA.getIdentifier())); } } diff --git a/thoth/src/test/java/com/salesforce/apollo/thoth/KerlTest.java b/thoth/src/test/java/com/salesforce/apollo/thoth/KerlTest.java index 6d25a52c8f..481ab9057c 100644 --- a/thoth/src/test/java/com/salesforce/apollo/thoth/KerlTest.java +++ b/thoth/src/test/java/com/salesforce/apollo/thoth/KerlTest.java @@ -12,7 +12,7 @@ import com.salesforce.apollo.stereotomy.*; import com.salesforce.apollo.stereotomy.event.EstablishmentEvent; import com.salesforce.apollo.stereotomy.event.KeyEvent; 
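The A/B exchange above is plain anti-entropy: each space summarizes what it holds as a Bloom filter (`populate`, here at a 0.000125 false-positive rate), hands the summary to the peer, receives back whatever the summary does not cover (`reconcile`), and applies the result (`update`). A toy, self-contained model of that round trip, with string sets standing in for the spaces, digests, and filters:

```java
import java.util.HashSet;
import java.util.List;
import java.util.Set;

final class ReconcileSketch {

    // Answer a peer's "have" summary with everything it appears to be missing.
    static List<String> reconcile(Set<String> mine, Set<String> peerHas) {
        return mine.stream().filter(event -> !peerHas.contains(event)).toList();
    }

    public static void main(String[] args) {
        Set<String> a = new HashSet<>(Set.of("inceptionA", "rotationA"));
        Set<String> b = new HashSet<>(Set.of("inceptionB", "rotationB"));

        var updateForA = reconcile(b, a); // B answers A's summary
        var updateForB = reconcile(a, b); // A answers B's summary

        a.addAll(updateForA);
        b.addAll(updateForB);
        System.out.println("converged: " + a.equals(b)); // true
    }
}
```

With a real Bloom filter, a false positive only means an event is withheld for that round; a later round with a different filter seed can still pick it up.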
-import com.salesforce.apollo.stereotomy.event.Seal.CoordinatesSeal; +import com.salesforce.apollo.stereotomy.event.Seal; import com.salesforce.apollo.stereotomy.event.Seal.DigestSeal; import com.salesforce.apollo.stereotomy.identifier.Identifier; import com.salesforce.apollo.stereotomy.identifier.SelfAddressingIdentifier; @@ -49,7 +49,7 @@ public void delegated() throws Exception { routers.values().forEach(r -> r.start()); dhts.values().forEach(dht -> dht.start(Duration.ofSeconds(1))); - KERL kerl = dhts.values().stream().findFirst().get().asKERL(); + var kerl = dhts.firstEntry().getValue().asKERL(); var ks = new MemKeyStore(); Stereotomy controller = new StereotomyImpl(ks, kerl, secureRandom); @@ -114,8 +114,7 @@ public void delegated() throws Exception { var digest = DigestAlgorithm.BLAKE3_256.digest("digest seal".getBytes()); var event = EventCoordinates.of(kerl.getKeyEvent(delegated.getLastEstablishmentEvent())); - var seals = List.of(DigestSeal.construct(digest), DigestSeal.construct(digest), - CoordinatesSeal.construct(event)); + var seals = List.of(DigestSeal.construct(digest), DigestSeal.construct(digest), Seal.construct(event)); delegated.rotate(); delegated.seal(InteractionSpecification.newBuilder()); @@ -128,7 +127,7 @@ public void direct() throws Exception { routers.values().forEach(r -> r.start()); dhts.values().forEach(dht -> dht.start(Duration.ofSeconds(1))); - KERL kerl = dhts.values().stream().findFirst().get().asKERL(); + var kerl = dhts.firstEntry().getValue().asKERL(); Stereotomy controller = new StereotomyImpl(new MemKeyStore(), kerl, secureRandom); @@ -136,8 +135,7 @@ public void direct() throws Exception { var digest = DigestAlgorithm.BLAKE3_256.digest("digest seal".getBytes()); var event = EventCoordinates.of(kerl.getKeyEvent(i.getLastEstablishmentEvent())); - var seals = List.of(DigestSeal.construct(digest), DigestSeal.construct(digest), - CoordinatesSeal.construct(event)); + var seals = List.of(DigestSeal.construct(digest), DigestSeal.construct(digest), Seal.construct(event)); i.rotate(); i.seal(InteractionSpecification.newBuilder()); diff --git a/thoth/src/test/java/com/salesforce/apollo/thoth/PublisherTest.java b/thoth/src/test/java/com/salesforce/apollo/thoth/PublisherTest.java index a6b44f6073..485fe03acf 100644 --- a/thoth/src/test/java/com/salesforce/apollo/thoth/PublisherTest.java +++ b/thoth/src/test/java/com/salesforce/apollo/thoth/PublisherTest.java @@ -6,7 +6,7 @@ */ package com.salesforce.apollo.thoth; -import com.salesfoce.apollo.stereotomy.event.proto.KERL_; +import com.salesforce.apollo.stereotomy.event.proto.KERL_; import com.salesforce.apollo.archipelago.LocalServer; import com.salesforce.apollo.archipelago.RouterImpl.CommonCommunications; import com.salesforce.apollo.archipelago.ServerConnectionCache; @@ -38,8 +38,6 @@ public class PublisherTest { @Test public void smokin() throws Exception { - - var exec = Executors.newVirtualThreadPerTaskExecutor(); var entropy = SecureRandom.getInstance("SHA1PRNG"); entropy.setSeed(new byte[] { 6, 6, 6 }); final var kerl_ = new MemKERL(DigestAlgorithm.DEFAULT); diff --git a/thoth/src/test/java/com/salesforce/apollo/thoth/ThothServerTest.java b/thoth/src/test/java/com/salesforce/apollo/thoth/ThothServerTest.java new file mode 100644 index 0000000000..06642ec51f --- /dev/null +++ b/thoth/src/test/java/com/salesforce/apollo/thoth/ThothServerTest.java @@ -0,0 +1,121 @@ +package com.salesforce.apollo.thoth; + +import com.google.protobuf.Empty; +import com.salesforce.apollo.cryptography.DigestAlgorithm; +import 
com.salesforce.apollo.membership.stereotomy.ControlledIdentifierMember; +import com.salesforce.apollo.stereotomy.ControlledIdentifier; +import com.salesforce.apollo.stereotomy.EventCoordinates; +import com.salesforce.apollo.stereotomy.Stereotomy; +import com.salesforce.apollo.stereotomy.StereotomyImpl; +import com.salesforce.apollo.stereotomy.event.InceptionEvent; +import com.salesforce.apollo.stereotomy.event.RotationEvent; +import com.salesforce.apollo.stereotomy.event.Seal; +import com.salesforce.apollo.stereotomy.event.protobuf.ProtobufEventFactory; +import com.salesforce.apollo.stereotomy.identifier.Identifier; +import com.salesforce.apollo.stereotomy.identifier.SelfAddressingIdentifier; +import com.salesforce.apollo.stereotomy.identifier.spec.InteractionSpecification; +import com.salesforce.apollo.stereotomy.mem.MemKERL; +import com.salesforce.apollo.stereotomy.mem.MemKeyStore; +import com.salesforce.apollo.thoth.grpc.ThothServer; +import com.salesforce.apollo.thoth.proto.Thoth_Grpc; +import io.grpc.Channel; +import io.grpc.ServerBuilder; +import io.grpc.inprocess.InProcessChannelBuilder; +import io.grpc.inprocess.InProcessServerBuilder; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.security.SecureRandom; +import java.util.Collections; +import java.util.UUID; +import java.util.concurrent.TimeUnit; + +import static org.junit.jupiter.api.Assertions.assertNotNull; + +/** + * @author hal.hildebrand + **/ +public class ThothServerTest { + private SecureRandom secureRandom; + + @BeforeEach + public void before() throws Exception { + secureRandom = SecureRandom.getInstance("SHA1PRNG"); + secureRandom.setSeed(new byte[] { 0 }); + } + + @Test + public void smokin() throws Exception { + var ks = new MemKeyStore(); + var kerl = new MemKERL(DigestAlgorithm.DEFAULT); + Stereotomy stereotomy = new StereotomyImpl(ks, kerl, secureRandom); + var member = new ControlledIdentifierMember(stereotomy.newIdentifier()); + + var localId = UUID.randomUUID().toString(); + ServerBuilder serverBuilder = InProcessServerBuilder.forName(localId) + .addService(new ThothServer(new Thoth(stereotomy))); + var server = serverBuilder.build(); + server.start(); + var channel = InProcessChannelBuilder.forName(localId).usePlaintext().build(); + try { + var thoth = new ThothClient(channel); + ControlledIdentifier controller = stereotomy.newIdentifier(); + + // delegated inception + var incp = thoth.inception(controller.getIdentifier()); + assertNotNull(incp); + + var seal = Seal.EventSeal.construct(incp.getIdentifier(), incp.hash(stereotomy.digestAlgorithm()), + incp.getSequenceNumber().longValue()); + + var builder = InteractionSpecification.newBuilder().addAllSeals(Collections.singletonList(seal)); + + // Commit + EventCoordinates coords = controller.seal(builder); + thoth.commit(coords); + assertNotNull(thoth.identifier()); + + // Delegated rotation + var rot = thoth.rotate(); + + assertNotNull(rot); + + seal = Seal.EventSeal.construct(rot.getIdentifier(), rot.hash(stereotomy.digestAlgorithm()), + rot.getSequenceNumber().longValue()); + + builder = InteractionSpecification.newBuilder().addAllSeals(Collections.singletonList(seal)); + + // Commit + coords = controller.seal(builder); + thoth.commit(coords); + } finally { + channel.shutdown(); + server.shutdown(); + server.awaitTermination(3, TimeUnit.SECONDS); + } + } + + private static class ThothClient { + private Thoth_Grpc.Thoth_BlockingStub client; + + private ThothClient(Channel channel) { + this.client = 
Thoth_Grpc.newBlockingStub(channel); + } + + public void commit(EventCoordinates coordinates) { + client.commit(coordinates.toEventCoords()); + } + + public SelfAddressingIdentifier identifier() { + return (SelfAddressingIdentifier) Identifier.from(client.identifier(Empty.getDefaultInstance())); + } + + public InceptionEvent inception(SelfAddressingIdentifier identifier) { + return ProtobufEventFactory.toKeyEvent(client.inception(identifier.toIdent())); + } + + public RotationEvent rotate() { + return ProtobufEventFactory.toKeyEvent(client.rotate(Empty.getDefaultInstance())); + } + } +} diff --git a/thoth/src/test/java/com/salesforce/apollo/thoth/ThothTest.java b/thoth/src/test/java/com/salesforce/apollo/thoth/ThothTest.java index a31f5a4cc1..887b9ba629 100644 --- a/thoth/src/test/java/com/salesforce/apollo/thoth/ThothTest.java +++ b/thoth/src/test/java/com/salesforce/apollo/thoth/ThothTest.java @@ -35,7 +35,7 @@ public class ThothTest { @BeforeEach public void before() throws Exception { secureRandom = SecureRandom.getInstance("SHA1PRNG"); - secureRandom.setSeed(new byte[]{0}); + secureRandom.setSeed(new byte[] { 0 }); } @Test @@ -50,11 +50,11 @@ public void smokin() throws Exception { // delegated inception var incp = thoth.inception(controller.getIdentifier(), - IdentifierSpecification.newBuilder()); + IdentifierSpecification.newBuilder()); assertNotNull(incp); var seal = Seal.EventSeal.construct(incp.getIdentifier(), incp.hash(stereotomy.digestAlgorithm()), - incp.getSequenceNumber().longValue()); + incp.getSequenceNumber().longValue()); var builder = InteractionSpecification.newBuilder().addAllSeals(Collections.singletonList(seal)); @@ -69,7 +69,7 @@ public void smokin() throws Exception { assertNotNull(rot); seal = Seal.EventSeal.construct(rot.getIdentifier(), rot.hash(stereotomy.digestAlgorithm()), - rot.getSequenceNumber().longValue()); + rot.getSequenceNumber().longValue()); builder = InteractionSpecification.newBuilder().addAllSeals(Collections.singletonList(seal)); diff --git a/thoth/src/test/resources/logback-test.xml b/thoth/src/test/resources/logback-test.xml index 2821702fc1..e6965c4e3e 100644 --- a/thoth/src/test/resources/logback-test.xml +++ b/thoth/src/test/resources/logback-test.xml @@ -2,53 +2,57 @@
[logback-test.xml hunk: the XML markup did not survive extraction; the recoverable content is the console encoder pattern %d{mm:ss.SSS} [%thread] %-5level %logger{0} - %msg%n plus a run of changed appender/logger elements whose names and levels were lost.]
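The new ThothServerTest wires a real gRPC blocking stub against an in-process server, then drives the delegated inception, seal/commit, and rotation flow through it. The same harness shape with the stock gRPC health-check service in place of ThothServer, so it stays self-contained; this assumes grpc-inprocess and grpc-services on the classpath and is only an illustration of the pattern, not Apollo's API:

```java
import io.grpc.ManagedChannel;
import io.grpc.Server;
import io.grpc.health.v1.HealthCheckRequest;
import io.grpc.health.v1.HealthGrpc;
import io.grpc.inprocess.InProcessChannelBuilder;
import io.grpc.inprocess.InProcessServerBuilder;
import io.grpc.protobuf.services.HealthStatusManager;

import java.util.UUID;
import java.util.concurrent.TimeUnit;

public final class InProcessHarness {

    public static void main(String[] args) throws Exception {
        var name = UUID.randomUUID().toString(); // unique in-process server name
        Server server = InProcessServerBuilder.forName(name)
                                              .addService(new HealthStatusManager().getHealthService())
                                              .build()
                                              .start();
        ManagedChannel channel = InProcessChannelBuilder.forName(name).usePlaintext().build();
        try {
            var status = HealthGrpc.newBlockingStub(channel)
                                   .check(HealthCheckRequest.getDefaultInstance())
                                   .getStatus();
            System.out.println("health: " + status);
        } finally {
            channel.shutdown();
            server.shutdown();
            server.awaitTermination(3, TimeUnit.SECONDS);
        }
    }
}
```

The test follows the same shutdown order as the finally block here: close the channel first, then the server, then await termination with a bounded timeout.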