diff --git a/backend/api/documents/v3alpha/access_control.go b/backend/api/documents/v3alpha/access_control.go index bcde3255..ba7df9ef 100644 --- a/backend/api/documents/v3alpha/access_control.go +++ b/backend/api/documents/v3alpha/access_control.go @@ -3,11 +3,11 @@ package documents import ( "context" "fmt" + "seed/backend/blob" "seed/backend/core" documents "seed/backend/genproto/documents/v3alpha" - "seed/backend/blob" + "seed/backend/util/cclock" "seed/backend/util/errutil" - "time" "github.com/ipfs/go-cid" cbornode "github.com/ipfs/go-ipld-cbor" @@ -65,7 +65,7 @@ func (srv *Server) CreateCapability(ctx context.Context, in *documents.CreateCap // TODO(burdiyan): Validate role according to the chain of capabilities. role := in.Role.String() - cpb, err := blob.NewCapability(kp, del, acc, in.Path, role, time.Now().UnixMicro(), in.NoRecursive) + cpb, err := blob.NewCapability(kp, del, acc, in.Path, role, cclock.New().MustNow(), in.NoRecursive) if err != nil { return nil, err } @@ -146,11 +146,11 @@ func capToProto(c cid.Cid, cpb *blob.Capability) (*documents.Capability, error) Id: c.String(), Issuer: cpb.Issuer.String(), Delegate: cpb.Delegate.String(), - Account: cpb.Account.String(), + Account: cpb.Space.String(), Path: cpb.Path, Role: documents.Role(role), IsExact: cpb.NoRecursive, - CreateTime: timestamppb.New(time.UnixMicro(cpb.Ts)), + CreateTime: timestamppb.New(cpb.Ts), } return pb, nil diff --git a/backend/api/documents/v3alpha/comments.go b/backend/api/documents/v3alpha/comments.go index 48089423..9a40aaaa 100644 --- a/backend/api/documents/v3alpha/comments.go +++ b/backend/api/documents/v3alpha/comments.go @@ -5,12 +5,12 @@ import ( "errors" "fmt" "seed/backend/api/documents/v3alpha/docmodel" + "seed/backend/blob" "seed/backend/core" documents "seed/backend/genproto/documents/v3alpha" - "seed/backend/hlc" - "seed/backend/blob" + "seed/backend/util/cclock" "seed/backend/util/errutil" - "time" + "seed/backend/util/must" "github.com/ipfs/go-cid" cbornode 
"github.com/ipfs/go-ipld-cbor" @@ -43,12 +43,12 @@ func (srv *Server) CreateComment(ctx context.Context, in *documents.CreateCommen return nil, err } - acc, err := core.DecodePrincipal(in.TargetAccount) + space, err := core.DecodePrincipal(in.TargetAccount) if err != nil { return nil, status.Errorf(codes.InvalidArgument, "failed to parse target account: %v", err) } - clock := hlc.NewClock() + clock := cclock.New() var ( threadRoot cid.Cid @@ -75,7 +75,7 @@ func (srv *Server) CreateComment(ctx context.Context, in *documents.CreateCommen threadRoot = replyParent } - if err := clock.Track(hlc.Timestamp(rp.Ts)); err != nil { + if err := clock.Track(rp.Ts); err != nil { return nil, err } @@ -84,13 +84,7 @@ func (srv *Server) CreateComment(ctx context.Context, in *documents.CreateCommen } } - target := blob.CommentTarget{ - Account: acc, - Path: in.TargetPath, - Version: versionHeads, - } - - blob, err := blob.NewComment(kp, cid.Undef, target, threadRoot, replyParent, commentContentFromProto(in.Content), int64(clock.MustNow())) + blob, err := blob.NewComment(kp, cid.Undef, space, in.TargetPath, versionHeads, threadRoot, replyParent, commentContentFromProto(in.Content), clock.MustNow()) if err != nil { return nil, err } @@ -158,12 +152,12 @@ func (srv *Server) ListComments(ctx context.Context, in *documents.ListCommentsR func commentToProto(c cid.Cid, cmt *blob.Comment) (*documents.Comment, error) { pb := &documents.Comment{ Id: c.String(), - TargetAccount: cmt.Target.Account.String(), - TargetPath: cmt.Target.Path, - TargetVersion: docmodel.NewVersion(cmt.Target.Version...).String(), + TargetAccount: cmt.Space.String(), + TargetPath: cmt.Path, + TargetVersion: docmodel.NewVersion(cmt.Version...).String(), Author: cmt.Author.String(), Content: commentContentToProto(cmt.Body), - CreateTime: timestamppb.New(time.UnixMicro(cmt.Ts)), + CreateTime: timestamppb.New(cmt.Ts), } if cmt.ReplyParent.Defined() { @@ -189,14 +183,7 @@ func commentContentToProto(in []blob.CommentBlock) 
[]*documents.BlockNode { out := make([]*documents.BlockNode, len(in)) for i, b := range in { out[i] = &documents.BlockNode{ - Block: &documents.Block{ - Id: b.ID, - Type: b.Type, - Text: b.Text, - Ref: b.Ref, - Attributes: b.Attributes, - Annotations: annotationsToProto(b.Annotations), - }, + Block: docmodel.BlockToProto(b.Block, cid.Undef), Children: commentContentToProto(b.Children), } } @@ -204,25 +191,6 @@ func commentContentToProto(in []blob.CommentBlock) []*documents.BlockNode { return out } -func annotationsToProto(in []blob.Annotation) []*documents.Annotation { - if in == nil { - return nil - } - - out := make([]*documents.Annotation, len(in)) - for i, a := range in { - out[i] = &documents.Annotation{ - Type: a.Type, - Ref: a.Ref, - Attributes: a.Attributes, - Starts: a.Starts, - Ends: a.Ends, - } - } - - return out -} - func commentContentFromProto(in []*documents.BlockNode) []blob.CommentBlock { if in == nil { return nil @@ -232,36 +200,10 @@ func commentContentFromProto(in []*documents.BlockNode) []blob.CommentBlock { for i, n := range in { out[i] = blob.CommentBlock{ - Block: blob.Block{ - ID: n.Block.Id, - Type: n.Block.Type, - Text: n.Block.Text, - Ref: n.Block.Ref, - Attributes: n.Block.Attributes, - Annotations: annotationsFromProto(n.Block.Annotations), - }, + Block: must.Do2(docmodel.BlockFromProto(n.Block)), Children: commentContentFromProto(n.Children), } } return out } - -func annotationsFromProto(in []*documents.Annotation) []blob.Annotation { - if in == nil { - return nil - } - - out := make([]blob.Annotation, len(in)) - for i, a := range in { - out[i] = blob.Annotation{ - Type: a.Type, - Ref: a.Ref, - Attributes: a.Attributes, - Starts: a.Starts, - Ends: a.Ends, - } - } - - return out -} diff --git a/backend/api/documents/v3alpha/comments_test.go b/backend/api/documents/v3alpha/comments_test.go index 4aa1b3da..83245f00 100644 --- a/backend/api/documents/v3alpha/comments_test.go +++ b/backend/api/documents/v3alpha/comments_test.go @@ -5,7 
+5,6 @@ import ( "seed/backend/core/coretest" pb "seed/backend/genproto/documents/v3alpha" "seed/backend/testutil" - "seed/backend/util/debugx" "testing" "github.com/stretchr/testify/require" @@ -89,6 +88,4 @@ func TestComments_Smoke(t *testing.T) { require.NoError(t, err) testutil.StructsEqual(want, list).Compare(t, "comment list must match") - - debugx.Dump(list) } diff --git a/backend/api/documents/v3alpha/dochistory.go b/backend/api/documents/v3alpha/dochistory.go index 2d8f04a2..c7cb89ae 100644 --- a/backend/api/documents/v3alpha/dochistory.go +++ b/backend/api/documents/v3alpha/dochistory.go @@ -6,7 +6,6 @@ import ( "seed/backend/api/documents/v3alpha/docmodel" "seed/backend/core" documents "seed/backend/genproto/documents/v3alpha" - "seed/backend/hlc" "seed/backend/util/apiutil" "seed/backend/util/colx" "seed/backend/util/errutil" @@ -69,7 +68,7 @@ func (srv *Server) ListDocumentChanges(ctx context.Context, in *documents.ListDo Changes: make([]*documents.DocumentChangeInfo, 0, in.PageSize), } - changes, err := doc.Entity().BFTDeps(slices.Collect(maps.Keys(doc.Entity().Heads()))) + changes, err := doc.BFTDeps(slices.Collect(maps.Keys(doc.Heads()))) if err != nil { return nil, err } @@ -97,7 +96,7 @@ func (srv *Server) ListDocumentChanges(ctx context.Context, in *documents.ListDo Id: cc, Author: change.Data.Author.String(), Deps: colx.SliceMap(change.Data.Deps, cid.Cid.String), - CreateTime: timestamppb.New(hlc.Timestamp(change.Data.Ts).Time()), + CreateTime: timestamppb.New(change.Data.Ts), }) } diff --git a/backend/api/documents/v3alpha/docmodel/entity.go b/backend/api/documents/v3alpha/docmodel/crdt.go similarity index 58% rename from backend/api/documents/v3alpha/docmodel/entity.go rename to backend/api/documents/v3alpha/docmodel/crdt.go index 1aecad42..7896388a 100644 --- a/backend/api/documents/v3alpha/docmodel/entity.go +++ b/backend/api/documents/v3alpha/docmodel/crdt.go @@ -5,14 +5,15 @@ import ( "crypto/rand" "crypto/sha256" "encoding/binary" + 
"encoding/hex" "fmt" "iter" "seed/backend/blob" "seed/backend/core" - "seed/backend/crdt2" - "seed/backend/hlc" + "seed/backend/util/cclock" "sort" "strings" + "time" "github.com/ipfs/go-cid" "github.com/multiformats/go-multibase" @@ -20,73 +21,163 @@ import ( "golang.org/x/exp/slices" ) -// Entity is our CRDT mutable object. -type Entity struct { - id blob.IRI - cids []cid.Cid - changes []*blob.Change - deps [][]int // deps for each change. - rdeps [][]int // reverse deps for each change. - applied map[cid.Cid]int - heads map[cid.Cid]struct{} - state *crdt2.Map - maxClock *hlc.Clock - actorsIntern map[string]string - vectorClock map[string]int64 +type opID struct { + Ts int64 + Origin string + Idx int } -// NewEntity creates a new entity with a given ID. -func NewEntity(id blob.IRI) *Entity { - return &Entity{ - id: id, - applied: make(map[cid.Cid]int), - heads: make(map[cid.Cid]struct{}), - state: crdt2.NewMap(), - maxClock: hlc.NewClock(), - actorsIntern: make(map[string]string), - vectorClock: make(map[string]int64), +func (o opID) String() string { + var out []byte + out = binary.BigEndian.AppendUint64(out, uint64(o.Ts)) + out = binary.BigEndian.AppendUint32(out, uint32(o.Idx)) + out = append(out, o.Origin...) + + return hex.EncodeToString(out) +} + +func decodeOpID(s string) (opID, error) { + in, err := hex.DecodeString(s) + if err != nil { + return opID{}, err } + + var out opID + out.Ts = int64(binary.BigEndian.Uint64(in[:8])) + out.Idx = int(binary.BigEndian.Uint32(in[8:12])) + out.Origin = string(in[12:]) + + return out, nil } -// NewEntityWithClock creates a new entity with a provided clock. -func NewEntityWithClock(id blob.IRI, clock *hlc.Clock) *Entity { - e := NewEntity(id) - e.maxClock = clock - return e +func newOpID(ts int64, origin string, idx int) opID { + return opID{ + Ts: ts, + Origin: origin, + Idx: idx, + } } -// ID returns the ID of the entity. 
-func (e *Entity) ID() blob.IRI { return e.id } +func (o opID) Compare(oo opID) int { + if o.Ts < oo.Ts { + return -1 + } -// Get a property under a given path. -func (e *Entity) Get(path ...string) (value any, ok bool) { - return e.state.Get(path...) + if o.Ts > oo.Ts { + return +1 + } + + if o.Idx < oo.Idx { + return -1 + } + + if o.Idx > oo.Idx { + return +1 + } + + return cmp.Compare(o.Origin, oo.Origin) } -// LastChangeTime is max time tracked in the HLC. -func (e *Entity) LastChangeTime() hlc.Timestamp { - return e.maxClock.Max() +func (op opID) Encode() EncodedOpID { + const ( + maxTimestamp = 1<<48 - 1 + maxIdx = 1<<24 - 1 + ) + + if op.Ts >= maxTimestamp { + panic("BUG: operation timestamp is too large") + } + + if op.Idx >= maxIdx { + panic("BUG: operation index is too large") + } + + var e EncodedOpID + + e[0] = byte(op.Ts >> 40) + e[1] = byte(op.Ts >> 32) + e[2] = byte(op.Ts >> 24) + e[3] = byte(op.Ts >> 16) + e[4] = byte(op.Ts >> 8) + e[5] = byte(op.Ts) + + e[6] = byte(op.Idx >> 16) + e[7] = byte(op.Idx >> 8) + e[8] = byte(op.Idx) + + copy(e[9:], op.Origin) + return e } -func (e *Entity) State() *crdt2.Map { - return e.state +// EncodedOpID is a CRDT Op ID that is compactly encoded in the following way: +// - 6 bytes (48 bits): timestamp. Enough precision to track Unix millisecond timestamps for thousands for years. +// - 3 bytes (24 bits): index/offset of the operation within the same Change/Transaction. +// - 6 bytes (48 bits): origin/replica/actor. Random 48-bit value of a replica that generated the operation. +// The timestamp and index are big-endian, to support lexicographic ordering of the IDs. +// This has some limitations: +// 1. Maximum number of operations in a single change is 16777215. +// 2. Same actor must not generate Changes/Transactions within the same millisecond. +// 3. The clocks on the devices generating the operations must be roughly syncronized to avoid inter-device conflicts in timestamps. 
+type EncodedOpID [15]byte + +type docCRDT struct { + id blob.IRI + cids []cid.Cid + changes []*blob.Change + deps [][]int // deps for each change. + rdeps [][]int // reverse deps for each change. + applied map[cid.Cid]int + heads map[cid.Cid]struct{} + + tree *treeOpSet + + stateMetadata map[string]*mvReg[string] + stateBlocks map[string]*mvReg[blob.Block] // blockID -> opid -> block state. + + clock *cclock.Clock + actorsIntern map[string]string + vectorClock map[string]time.Time +} + +func newCRDT(id blob.IRI, clock *cclock.Clock) *docCRDT { + e := &docCRDT{ + id: id, + applied: make(map[cid.Cid]int), + heads: make(map[cid.Cid]struct{}), + tree: newTreeOpSet(), + stateMetadata: make(map[string]*mvReg[string]), + stateBlocks: make(map[string]*mvReg[blob.Block]), + clock: cclock.New(), + actorsIntern: make(map[string]string), + vectorClock: make(map[string]time.Time), + } + e.clock = clock + return e +} + +func (e *docCRDT) GetMetadata() map[string]string { + out := make(map[string]string, len(e.stateMetadata)) + + for k, v := range e.stateMetadata { + vv, ok := v.GetLatestOK() + if ok { + out[k] = vv + } + } + + return out } // Heads returns the map of head changes. // This must be read only. Not safe for concurrency. -func (e *Entity) Heads() map[cid.Cid]struct{} { +func (e *docCRDT) Heads() map[cid.Cid]struct{} { return e.heads } -// NumChanges returns the number of changes applied to the entity. -func (e *Entity) NumChanges() int { - return len(e.cids) -} - // Checkout returns an entity with the state filtered up to the given heads. // If no heads are given it returns the same instance of the Entity. // If heads given are the same as the current heads, the same instance is returned as well. 
-func (e *Entity) Checkout(heads []cid.Cid) (*Entity, error) { +func (e *docCRDT) Checkout(heads []cid.Cid) (*docCRDT, error) { if len(heads) == 0 { return e, nil } @@ -130,14 +221,11 @@ func (e *Entity) Checkout(heads []cid.Cid) (*Entity, error) { } slices.Reverse(chain) - clock := hlc.NewClock() - entity := NewEntityWithClock(e.id, clock) + clock := cclock.New() + entity := newCRDT(e.id, clock) for _, c := range chain { - if err := entity.ApplyChange(blob.ChangeRecord{ - CID: e.cids[c], - Data: e.changes[c], - }); err != nil { + if err := entity.ApplyChange(e.cids[c], e.changes[c]); err != nil { return nil, err } } @@ -182,7 +270,7 @@ func (v Version) Parse() ([]cid.Cid, error) { return out, nil } -func (e *Entity) Version() Version { +func (e *docCRDT) Version() Version { if len(e.heads) == 0 { return "" } @@ -191,7 +279,7 @@ func (e *Entity) Version() Version { } // BFTDeps returns a single-use iterator that does breadth-first traversal of the Change DAG deps. -func (e *Entity) BFTDeps(start []cid.Cid) (iter.Seq2[int, blob.ChangeRecord], error) { +func (e *docCRDT) BFTDeps(start []cid.Cid) (iter.Seq2[int, blob.ChangeRecord], error) { visited := make(map[int]struct{}, len(e.cids)) queue := make([]int, 0, len(e.cids)) var scratch []int @@ -202,7 +290,7 @@ func (e *Entity) BFTDeps(start []cid.Cid) (iter.Seq2[int, blob.ChangeRecord], er if e.changes[i].Ts == e.changes[j].Ts { return cmp.Compare(e.cids[i].KeyString(), e.cids[j].KeyString()) } - return cmp.Compare(e.changes[i].Ts, e.changes[j].Ts) + return cmp.Compare(e.changes[i].Ts.UnixNano(), e.changes[j].Ts.UnixNano()) }) queue = append(queue, scratch...) } @@ -239,15 +327,14 @@ func (e *Entity) BFTDeps(start []cid.Cid) (iter.Seq2[int, blob.ChangeRecord], er }, nil } -// ApplyChange to the internal state. 
-func (e *Entity) ApplyChange(rec blob.ChangeRecord) error { - if _, ok := e.applied[rec.CID]; ok { +func (e *docCRDT) ApplyChange(c cid.Cid, ch *blob.Change) error { + if _, ok := e.applied[c]; ok { return nil } var actor string { - au := rec.Data.Author.UnsafeString() + au := ch.Author.UnsafeString() a, ok := e.actorsIntern[au] if !ok { e.actorsIntern[au] = au @@ -256,47 +343,95 @@ func (e *Entity) ApplyChange(rec blob.ChangeRecord) error { actor = a } - if rec.Data.Ts < e.vectorClock[actor] { - return fmt.Errorf("applying change '%s' violates causal order", rec.CID) + if tracked := e.vectorClock[actor]; ch.Ts.Before(tracked) { + return fmt.Errorf("applying change '%s' violates causal order: incoming=%s tracked=%s", c, ch.Ts, tracked) } - e.vectorClock[actor] = rec.Data.Ts + e.vectorClock[actor] = ch.Ts // TODO(hm24): is this check necessary? // if ch.Ts < int64(e.maxClock.Max()) { // return fmt.Errorf("applying change '%s' out of causal order", c) // } - deps := make([]int, len(rec.Data.Deps)) + deps := make([]int, len(ch.Deps)) - for i, dep := range rec.Data.Deps { + for i, dep := range ch.Deps { depIdx, ok := e.applied[dep] if !ok { - return fmt.Errorf("missing dependency %s of change %s", dep, rec.CID) + return fmt.Errorf("missing dependency %s of change %s", dep, c) } deps[i] = depIdx } - if err := e.maxClock.Track(hlc.Timestamp(rec.Data.Ts)); err != nil { + if err := e.clock.Track(ch.Ts); err != nil { return err } - e.state.ApplyPatch(int64(rec.Data.Ts), OriginFromCID(rec.CID), rec.Data.Payload) + ts := ch.Ts.UnixMicro() + origin := originFromCID(c) + + for idx, op := range ch.Ops { + opid := newOpID(ts, origin, idx) + switch op.Type { + case blob.OpSetMetadata: + for k, v := range op.Data { + reg := e.stateMetadata[k] + if reg == nil { + reg = newMVReg[string]() + e.stateMetadata[k] = reg + } + reg.Set(opid, v.(string)) + } + case blob.OpReplaceBlock: + var blk blob.Block + blob.MapToCBOR(op.Data, &blk) + + reg := e.stateBlocks[blk.ID] + if reg == nil { + 
reg = newMVReg[blob.Block]() + e.stateBlocks[blk.ID] = reg + } + reg.Set(opid, blk) + case blob.OpMoveBlock: + block, ok := op.Data["block"].(string) + if !ok || block == "" { + return fmt.Errorf("missing block in move op") + } - e.cids = append(e.cids, rec.CID) - e.changes = append(e.changes, rec.Data) + parent, _ := op.Data["parent"].(string) + + leftOriginRaw, _ := op.Data["leftOrigin"].(string) + refID, err := decodeOpID(leftOriginRaw) + if err != nil { + return fmt.Errorf("failed to decode move left origin op id: %w", err) + } + // TODO(burdiyan): Get rid of this self trick. + if refID.Ts == 0 && refID.Origin == "self" { + refID.Ts = ts + refID.Origin = origin + } + + if err := e.tree.Integrate(opid, parent, block, refID); err != nil { + return err + } + } + } + + e.cids = append(e.cids, c) + e.changes = append(e.changes, ch) e.deps = append(e.deps, nil) e.rdeps = append(e.rdeps, nil) - e.heads[rec.CID] = struct{}{} + e.heads[c] = struct{}{} curIdx := len(e.changes) - 1 - e.applied[rec.CID] = curIdx + e.applied[c] = curIdx // One more pass through the deps to update the internal DAG structure, // and update the heads of the current version. // To avoid corrupting the entity state we shouldn't do this in the first loop we did. - for i, dep := range rec.Data.Deps { + for i, dep := range ch.Deps { // If any of the deps was a head, then it's no longer the case. delete(e.heads, dep) @@ -319,7 +454,7 @@ func (e *Entity) ApplyChange(rec blob.ChangeRecord) error { // a ← b ← c ← d // ↖ // e -func (e *Entity) Deps() []cid.Cid { +func (e *docCRDT) Deps() []cid.Cid { if len(e.heads) == 0 { return nil } @@ -417,38 +552,28 @@ func addUnique(in []int, v int) []int { return slices.Insert(in, targetIndex, v) } -// OriginFromCID creates a CRDT origin from the last 8 chars of the hash. -// Most of the time it's not needed, because HLC is very unlikely to collide. 
-func OriginFromCID(c cid.Cid) string { - if !c.Defined() { - return "" - } - - str, err := c.StringOfBase(multibase.Base58BTC) - if err != nil { - panic(err) +// prepareChange to be applied later. +func (e *docCRDT) prepareChange(ts time.Time, signer core.KeyPair, ops []blob.Op) (hb blob.Encoded[*blob.Change], err error) { + var genesis cid.Cid + if len(e.cids) > 0 { + genesis = e.cids[0] } - return str[len(str)-9:] -} -// NextTimestamp returns the next timestamp from the HLC. -func (e *Entity) NextTimestamp() hlc.Timestamp { - return e.maxClock.MustNow() -} + var depth int -// CreateChange entity creating a change blob, and applying it to the internal state. -func (e *Entity) CreateChange(action string, ts hlc.Timestamp, signer core.KeyPair, payload map[string]any) (hb blob.Encoded[*blob.Change], err error) { - hb, err = blob.NewChange(signer, maps.Keys(e.heads), action, payload, int64(ts)) - if err != nil { - return hb, err - } - - rec := blob.ChangeRecord{ - CID: hb.CID, - Data: hb.Decoded, + deps := maps.Keys(e.heads) + // Ensure we don't use empty non-nil slice, which would leak into the encoded format. + if len(deps) == 0 { + deps = nil + } else { + for _, dep := range deps { + depth = max(depth, e.changes[e.applied[dep]].Depth) + } + depth++ } - if err := e.ApplyChange(rec); err != nil { + hb, err = blob.NewChange(signer, genesis, deps, depth, ops, ts) + if err != nil { return hb, err } @@ -503,12 +628,3 @@ func NewUnforgeableID(prefix string, author core.Principal, nonce []byte, ts int // But it should have enough collision resistance for our purpose. 
return prefix + base[len(base)-hashSize:], nonce } - -func verifyUnforgeableID(id blob.IRI, prefix int, owner core.Principal, nonce []byte, ts int64) error { - id2, _ := NewUnforgeableID(string(id[:prefix]), owner, nonce, ts) - if id2 != string(id) { - return fmt.Errorf("failed to verify unforgeable ID want=%q got=%q", id, id2) - } - - return nil -} diff --git a/backend/api/documents/v3alpha/docmodel/crdt_block_tree.go b/backend/api/documents/v3alpha/docmodel/crdt_block_tree.go new file mode 100644 index 00000000..f6b39399 --- /dev/null +++ b/backend/api/documents/v3alpha/docmodel/crdt_block_tree.go @@ -0,0 +1,454 @@ +package docmodel + +import ( + "fmt" + "iter" + "math" + "seed/backend/util/btree" + "strings" +) + +type moveEffect byte + +const ( + moveEffectNone moveEffect = 0 + moveEffectCreated moveEffect = 1 + moveEffectMoved moveEffect = 2 +) + +const TrashNodeID = "◊" + +type moveRecord struct { + OpID opID + Parent string + Block string + Ref opID +} + +type treeOpSet struct { + // Moved operations sorted by their opIDs. + log *btree.Map[opID, moveRecord] + + // Parent block -> list of children blocks. + sublists *btree.Map[string, *rgaList[string]] +} + +func newTreeOpSet() *treeOpSet { + opset := &treeOpSet{ + log: btree.New[opID, moveRecord](8, opID.Compare), + sublists: btree.New[string, *rgaList[string]](8, strings.Compare), + } + + // Create initial lists for root and trash subtrees. + opset.sublists.Set("", newRGAList[string]()) + opset.sublists.Set(TrashNodeID, newRGAList[string]()) + + return opset +} + +func (opset *treeOpSet) Copy() *treeOpSet { + cpy := &treeOpSet{ + log: opset.log.Copy(), + sublists: opset.sublists.Copy(), + } + + // TODO(burdiyan): improve on this somehow. + // Make sure the original sublists are not modified during mutations. 
+ for k, v := range cpy.sublists.Items() { + cpy.sublists.Set(k, v.Copy()) + } + + return cpy +} + +func (opset *treeOpSet) Integrate(opID opID, parent, block string, refID opID) error { + if _, ok := opset.log.Get(opID); ok { + return fmt.Errorf("duplicate move op ID: %v", opID) + } + + subtree, ok := opset.sublists.Get(parent) + if !ok { + return fmt.Errorf("parent '%s' not found in tree", parent) + } + + if err := subtree.Integrate(opID, refID, block); err != nil { + return fmt.Errorf("failed to integrate move operation (block=%s parent=%s ref=%v): %w", block, parent, refID, err) + } + + // We need to create a subtree for every block. + if _, ok := opset.sublists.Get(block); !ok { + if opset.sublists.Set(block, newRGAList[string]()) { + panic("BUG: duplicate subtree for block " + block) + } + } + + move := moveRecord{ + OpID: opID, + Block: block, + Parent: parent, + Ref: refID, + } + + if opset.log.Set(opID, move) { + panic(fmt.Errorf("BUG: duplicate move op ID: %v", opID)) + } + + return nil +} + +func (opset *treeOpSet) State() *blockTreeState { + state := &blockTreeState{ + blocks: btree.New[string, blockState](8, strings.Compare), + opSet: opset.Copy(), + invisibleMoves: btree.New[opID, struct{}](8, opID.Compare), + } + + for opid, move := range opset.log.Items() { + if state.isAncestor(move.Block, move.Parent) { + state.invisibleMoves.Set(opid, struct{}{}) + continue + } + + prev, replaced := state.blocks.Swap(move.Block, blockState{Parent: move.Parent, Position: move.OpID}) + if replaced { + state.invisibleMoves.Set(prev.Position, struct{}{}) + } + } + + return state +} + +type blockState struct { + Parent string + Position opID +} + +type blockTreeState struct { + // Copy of the original opset. + opSet *treeOpSet + blocks *btree.Map[string, blockState] + invisibleMoves *btree.Map[opID, struct{}] +} + +// isAncestor returns checks if a is an ancestor of b. 
+func (state *blockTreeState) isAncestor(a, b string) bool { + n, ok := state.blocks.Get(b) + for { + if !ok || n.Parent == "" || n.Parent == TrashNodeID { + return false + } + + if n.Parent == a { + return true + } + + n, ok = state.blocks.Get(n.Parent) + } +} + +type blockPair struct { + Parent string + Child string +} + +// DFT does depth-first traversal of the block tree starting from the root. +// It returns a sequence of (parent, block) pairs. +func (state *blockTreeState) DFT() iter.Seq[blockPair] { + return func(yield func(blockPair) bool) { + state.walk("", yield) + } +} + +func (state *blockTreeState) walk(parent string, yield func(blockPair) bool) bool { + children := state.opSet.sublists.GetMaybe(parent) + if children == nil || children.items.Len() == 0 { + return true + } + + for _, slot := range children.items.Items() { + if _, ok := state.invisibleMoves.Get(slot.ID); ok || slot.IsDeleted { + continue + } + + if !yield(blockPair{Parent: parent, Child: slot.Value}) { + break + } + + if !state.walk(slot.Value, yield) { + return false + } + } + + return true +} + +func (state *blockTreeState) Mutate() *blockTreeMutation { + dirtyState := &blockTreeState{ + blocks: state.blocks.Copy(), + opSet: state.opSet.Copy(), + invisibleMoves: state.invisibleMoves.Copy(), + } + + return &blockTreeMutation{ + initial: state, + dirty: dirtyState, + } +} + +type blockTreeMutation struct { + initial *blockTreeState + dirty *blockTreeState + counter int + done bool +} + +func (mut *blockTreeMutation) Move(parent, block, left string) (moveEffect, error) { + if mut.done { + panic("BUG: nil mutation") + } + + if block == "" { + return moveEffectNone, fmt.Errorf("block must not be empty") + } + + if block == left { + return moveEffectNone, fmt.Errorf("block and left must not be the same") + } + + if left == TrashNodeID { + panic("BUG: trash can't be left") + } + + if parent != "" && left != "" && parent == left { + return moveEffectNone, fmt.Errorf("parent and left must not 
be the same") + } + + // Check if parent is in the tree. + if parent != "" && parent != TrashNodeID { + if _, ok := mut.dirty.blocks.Get(parent); !ok { + return moveEffectNone, fmt.Errorf("desired parent block %s is not in the tree", parent) + } + } + + // Preventing cycles. + if mut.dirty.isAncestor(block, parent) { + return moveEffectNone, fmt.Errorf("cycle detected: block %s is ancestor of %s", block, parent) + } + + leftState, ok := mut.dirty.blocks.Get(left) + if !ok { + if left == "" { + leftState = blockState{Parent: parent} + } else { + return moveEffectNone, fmt.Errorf("left block '%s' not found in tree", left) + } + } + + if leftState.Parent != parent { + return moveEffectNone, fmt.Errorf("left block '%s' is not a child of parent '%s'", left, parent) + } + + me := moveEffectCreated + curState, ok := mut.dirty.blocks.Get(block) + newState := blockState{ + Parent: parent, + Position: newOpID(math.MaxInt64, "\xFF\xFF\xFF\xFF", mut.counter), + } + if ok { + me = moveEffectMoved + + siblings := mut.dirty.opSet.sublists.GetMaybe(curState.Parent) + fracdex, ok := siblings.applied.Get(curState.Position) + if !ok { + panic("BUG: existing block is not found among supposed parent's children") + } + + // We need to check whether the block is already in the desired position, + // i.e. it already has the same parent, and the block to the left of it is the desired left. + if curState.Parent == parent { + // We check the items to the left of the current position of our block, + // to see if it's already the desired left block. + for k, v := range siblings.items.SeekReverse(fracdex) { + if k == fracdex { + continue + } + if v.IsDeleted { + continue + } + if _, ok := mut.dirty.invisibleMoves.Get(v.ID); ok { + continue + } + if v.Value == left { + return moveEffectNone, nil + } + // No need to iterate further than the first non-deleted left sibling. + break + } + } + + // Mark the previous position of the block as deleted. 
+ // TODO: If it was created by our own transaction – just delete it. + curListItem := siblings.items.GetMaybe(fracdex) + curListItem.IsDeleted = true + siblings.items.Set(fracdex, curListItem) + } + + mut.dirty.blocks.Set(block, newState) + + mut.counter++ + + if err := mut.dirty.opSet.Integrate(newState.Position, parent, block, leftState.Position); err != nil { + return moveEffectNone, err + } + + return me, nil +} + +func (mut *blockTreeMutation) Commit(ts int64, origin string) iter.Seq[moveRecord] { + // We iterate the state of the block tree in a breadth-first order, + // and we clean up all the moves we've made, such that redundant moves are not included. + + // TODO(burdian): improve detecting operations created by our mutation. + isOurs := func(opID opID) bool { + return opID.Ts == math.MaxInt64 && opID.Origin == "\xFF\xFF\xFF\xFF" + } + + type queueItem struct { + Block string + Children *rgaList[string] + } + + return func(yield func(moveRecord) bool) { + defer func() { + // Make sure after the commit the mutation is not used anymore. + // We want any further usage to panic. + mut.done = true + }() + + var ( + queue = []queueItem{{Block: "", Children: mut.dirty.opSet.sublists.GetMaybe("")}} + counter int + ) + + for len(queue) > 0 { + sublist := queue[0] + queue = queue[1:] + var last rgaItem[string] + + for _, block := range sublist.Children.items.Items() { + if children, ok := mut.dirty.opSet.sublists.Get(block.Value); ok && children.items.Len() > 0 { + queue = append(queue, queueItem{Block: block.Value, Children: children}) + } + + if !isOurs(block.ID) { + last = block + continue + } + + if block.IsDeleted { + continue + } + + mr := moveRecord{ + OpID: newOpID(ts, origin, counter), + Parent: sublist.Block, + Block: block.Value, + } + + if isOurs(last.ID) { + mr.Ref = newOpID(ts, origin, counter-1) + } else { + mr.Ref = last.ID + } + + // Check if the current position of the block is the same as initial. 
+ initialPos, ok := mut.initial.findLogicalPosition(block.Value) + if ok { + dirtyPos, ok := mut.dirty.findLogicalPosition(block.Value) + if ok && initialPos.Parent == dirtyPos.Parent && initialPos.Left == dirtyPos.Left { + continue + } + + } + + if !yield(mr) { + break + } + + last = block + counter++ + } + } + + deleted, ok := mut.dirty.opSet.sublists.Get(TrashNodeID) + if !ok { + panic("BUG: no trash sublist") + } + + for _, block := range deleted.items.Items() { + if !isOurs(block.ID) || block.IsDeleted { + continue + } + + // If currently deleted block wasn't in the initial state, + // then we can safely ignore it, because it was created by our own transaction. + if _, ok := mut.initial.blocks.Get(block.Value); !ok { + continue + } + + mr := moveRecord{ + OpID: newOpID(ts, origin, counter), + Parent: TrashNodeID, + Block: block.Value, + Ref: opID{}, + } + if !yield(mr) { + break + } + counter++ + } + } +} + +type logicalPosition struct { + Parent string + Left string +} + +func (state *blockTreeState) findLogicalPosition(block string) (lp logicalPosition, ok bool) { + bs, ok := state.blocks.Get(block) + if !ok { + return lp, false + } + + siblings, ok := state.opSet.sublists.Get(bs.Parent) + if !ok { + return lp, false + } + + fracdex, ok := siblings.applied.Get(bs.Position) + if !ok { + return lp, false + } + + lp.Parent = bs.Parent + + for k, v := range siblings.items.SeekReverse(fracdex) { + if k == fracdex { + continue + } + if v.IsDeleted { + continue + } + if _, ok := state.invisibleMoves.Get(v.ID); ok { + continue + } + + lp.Left = v.Value + + // No need to iterate further than the first non-deleted left sibling. 
+ break + } + + return lp, true +} diff --git a/backend/api/documents/v3alpha/docmodel/crdt_block_tree_test.go b/backend/api/documents/v3alpha/docmodel/crdt_block_tree_test.go new file mode 100644 index 00000000..1d39fdd6 --- /dev/null +++ b/backend/api/documents/v3alpha/docmodel/crdt_block_tree_test.go @@ -0,0 +1,117 @@ +package docmodel + +import ( + "seed/backend/util/must" + "slices" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestCRDTBlockTree_Smoke(t *testing.T) { + opset := newTreeOpSet() + + mut := opset.State().Mutate() + require.Equal(t, moveEffectCreated, must.Do2(mut.Move("", "b1", ""))) + require.Equal(t, moveEffectCreated, must.Do2(mut.Move("", "b2", "b1"))) + require.Equal(t, moveEffectCreated, must.Do2(mut.Move("", "b3", "b2"))) + require.Equal(t, moveEffectCreated, must.Do2(mut.Move("b1", "b1.1", ""))) + require.Equal(t, moveEffectCreated, must.Do2(mut.Move("b1", "b1.0", ""))) + require.Equal(t, moveEffectMoved, must.Do2(mut.Move("b1", "b2", "b1.1"))) + require.Equal(t, moveEffectMoved, must.Do2(mut.Move("b1", "b2", "b1.0"))) + require.Equal(t, moveEffectMoved, must.Do2(mut.Move("b1", "b2", "b1.1"))) + + /* + - b1 + - b1.0 + - b1.1 + - b2 + - b3 + */ + + wantOps := []moveRecord{ + {OpID: newOpID(12345, "testing", 0), Parent: "", Block: "b1", Ref: opID{}}, + {OpID: newOpID(12345, "testing", 1), Parent: "", Block: "b3", Ref: newOpID(12345, "testing", 0)}, + {OpID: newOpID(12345, "testing", 2), Parent: "b1", Block: "b1.0", Ref: opID{}}, + {OpID: newOpID(12345, "testing", 3), Parent: "b1", Block: "b1.1", Ref: newOpID(12345, "testing", 2)}, + {OpID: newOpID(12345, "testing", 4), Parent: "b1", Block: "b2", Ref: newOpID(12345, "testing", 3)}, + } + + gotOps := slices.Collect(mut.Commit(12345, "testing")) + + require.Equal(t, wantOps, gotOps, "committed mutation moves must match") + + require.Equal(t, 0, opset.log.Len(), "mutation must operate on a copy of opset") + for _, children := range opset.sublists.Items() { + require.Equal(t, 
0, children.items.Len(), "mutation must operate on a copy of opset") + } + + // Apply committed ops to the original state. + for _, op := range gotOps { + require.NoError(t, opset.Integrate(op.OpID, op.Parent, op.Block, op.Ref)) + } + + wantTree := []blockPair{ + {"", "b1"}, + {"b1", "b1.0"}, + {"b1", "b1.1"}, + {"b1", "b2"}, + {"", "b3"}, + } + gotTree := slices.Collect(opset.State().DFT()) + require.Equal(t, wantTree, gotTree, "tree after first set of moves must match") + + { + /* + - b3 + - b1 + - b1.0 + - b1.2 + - b1.1 + */ + + mut := opset.State().Mutate() + require.Equal(t, moveEffectCreated, must.Do2(mut.Move("b1", "b1.2", "b1.0"))) + require.Equal(t, moveEffectMoved, must.Do2(mut.Move("", "b3", ""))) + require.Equal(t, moveEffectMoved, must.Do2(mut.Move(TrashNodeID, "b2", ""))) + require.Equal(t, moveEffectCreated, must.Do2(mut.Move("", "b4", ""))) + require.Equal(t, moveEffectMoved, must.Do2(mut.Move("", "b4", "b3"))) + require.Equal(t, moveEffectMoved, must.Do2(mut.Move("b1", "b4", "b1.2"))) + require.Equal(t, moveEffectMoved, must.Do2(mut.Move("b1", "b4", "b1.1"))) + require.Equal(t, moveEffectMoved, must.Do2(mut.Move(TrashNodeID, "b4", ""))) + + // // Move around the existing node and put it back in the same logical place. + // // This should not create new moves. 
+ require.Equal(t, moveEffectMoved, must.Do2(mut.Move(TrashNodeID, "b1.0", ""))) + require.Equal(t, moveEffectMoved, must.Do2(mut.Move("", "b1.0", ""))) + require.Equal(t, moveEffectMoved, must.Do2(mut.Move("", "b1.0", "b1"))) + require.Equal(t, moveEffectMoved, must.Do2(mut.Move("", "b1.0", "b3"))) + require.Equal(t, moveEffectMoved, must.Do2(mut.Move("b1", "b1.0", ""))) + + wantOps := []moveRecord{ + {OpID: newOpID(12346, "alice", 0), Parent: "", Block: "b3", Ref: opID{}}, + {OpID: newOpID(12346, "alice", 1), Parent: "b1", Block: "b1.2", Ref: newOpID(12345, "testing", 2)}, + {OpID: newOpID(12346, "alice", 2), Parent: TrashNodeID, Block: "b2", Ref: opID{}}, + } + + gotOps := slices.Collect(mut.Commit(12346, "alice")) + + // _ = wantOps + // _ = gotOps + require.Equal(t, wantOps, gotOps, "committed mutation moves must match") + + for _, op := range gotOps { + require.NoError(t, opset.Integrate(op.OpID, op.Parent, op.Block, op.Ref)) + } + + wantTree := []blockPair{ + {"", "b3"}, + {"", "b1"}, + {"b1", "b1.0"}, + {"b1", "b1.2"}, + {"b1", "b1.1"}, + } + gotTree := slices.Collect(opset.State().DFT()) + require.Equal(t, wantTree, gotTree, "tree after second set of moves must match") + } +} diff --git a/backend/api/documents/v3alpha/docmodel/crdt_list.go b/backend/api/documents/v3alpha/docmodel/crdt_list.go new file mode 100644 index 00000000..a983582f --- /dev/null +++ b/backend/api/documents/v3alpha/docmodel/crdt_list.go @@ -0,0 +1,104 @@ +package docmodel + +import ( + "fmt" + "iter" + "seed/backend/util/btree" + "strings" + + "roci.dev/fracdex" +) + +var errCausalityViolation = fmt.Errorf("causality violation") + +type rgaItem[T any] struct { + ID opID + Ref opID + Value T + IsDeleted bool +} + +var zeroOpID = opID{} + +type rgaList[T any] struct { + applied *btree.Map[opID, string] // opID => fracdex + items *btree.Map[string, rgaItem[T]] // fracdex => rgaItem +} + +func newRGAList[T any]() *rgaList[T] { + return &rgaList[T]{ + applied: btree.New[opID, string](8, 
opID.Compare), + items: btree.New[string, rgaItem[T]](8, strings.Compare), + } +} + +// Copy returns a structurally-shared copy of the list. +func (l *rgaList[T]) Copy() *rgaList[T] { + return &rgaList[T]{ + applied: l.applied.Copy(), + items: l.items.Copy(), + } +} + +func (l *rgaList[T]) Integrate(id, ref opID, v T) error { + if _, ok := l.applied.Get(id); ok { + return fmt.Errorf("duplicate op ID in the list") + } + + var left string + if ref != zeroOpID { + refFracdex, ok := l.applied.Get(ref) + if !ok { + return fmt.Errorf("%w: ref op %v is not found", errCausalityViolation, ref) + } + left = refFracdex + } + + var right string + for k, v := range l.items.Seek(left) { + // Seek returns the pivot item first. + if k == left { + continue + } + + // RGA rules: skip over any elements with a greater ID to the right of our desired insertion point. + if v.ID.Compare(id) > 0 { + left = k + continue + } else { + right = k + break + } + } + + newPos, err := fracdex.KeyBetween(left, right) + if err != nil { + return err + } + + newItem := rgaItem[T]{ID: id, Ref: ref, Value: v} + + if l.items.Set(newPos, newItem) { + panic("BUG: duplicate fracdex") + } + + if l.applied.Set(id, newPos) { + panic("BUG: duplicate op ID") + } + + return nil +} + +func (l *rgaList[T]) ValuesAlive() iter.Seq[T] { + return func(yield func(T) bool) { + for _, v := range l.items.Items() { + if v.IsDeleted { + continue + } + + if !yield(v.Value) { + break + } + } + } +} diff --git a/backend/api/documents/v3alpha/docmodel/crdt_list_test.go b/backend/api/documents/v3alpha/docmodel/crdt_list_test.go new file mode 100644 index 00000000..35897792 --- /dev/null +++ b/backend/api/documents/v3alpha/docmodel/crdt_list_test.go @@ -0,0 +1,82 @@ +package docmodel + +import ( + "errors" + "slices" + "strconv" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestCRDTList(t *testing.T) { + in := []rgaItem[string]{ + {ID: newOpID(1, "alice", 0), Ref: opID{}, Value: "A"}, + {ID: newOpID(1, "alice", 
1), Ref: newOpID(1, "alice", 0), Value: "B"}, + {ID: newOpID(1, "alice", 2), Ref: newOpID(1, "alice", 1), Value: "C"}, + + {ID: newOpID(1, "bob", 0), Ref: opID{}, Value: "X"}, + {ID: newOpID(1, "bob", 1), Ref: newOpID(1, "bob", 0), Value: "Y"}, + {ID: newOpID(1, "bob", 2), Ref: newOpID(1, "bob", 1), Value: "Z"}, + } + + want := []string{"X", "Y", "Z", "A", "B", "C"} + + for i, perm := range permute(in) { + t.Run(strconv.Itoa(i), func(t *testing.T) { + l := newRGAList[string]() + for _, item := range perm { + if err := l.Integrate(item.ID, item.Ref, item.Value); err != nil { + if errors.Is(err, errCausalityViolation) { + // Permutations are expected to violate causality, so we ignore those errors. + return + } + t.Fatalf("Integrate failed: %v", err) + } + } + got := slices.Collect(l.ValuesAlive()) + require.Equal(t, want, got) + }) + } +} + +func permute[T any](arr []T) [][]T { + n := len(arr) + var res [][]T + + // c is the control array that keeps track of the swaps + c := make([]int, n) + + // Add the initial permutation + perm := make([]T, n) + copy(perm, arr) + res = append(res, perm) + + i := 0 + for i < n { + if c[i] < i { + // Swap according to whether i is even or odd + if i%2 == 0 { + arr[0], arr[i] = arr[i], arr[0] + } else { + arr[c[i]], arr[i] = arr[i], arr[c[i]] + } + + // Add the current permutation to the result + perm := make([]T, n) + copy(perm, arr) + res = append(res, perm) + + // Increment the control array + c[i] += 1 + // Reset i + i = 0 + } else { + // Reset c[i] and move to the next position + c[i] = 0 + i++ + } + } + + return res +} diff --git a/backend/api/documents/v3alpha/docmodel/crdt_mvreg.go b/backend/api/documents/v3alpha/docmodel/crdt_mvreg.go new file mode 100644 index 00000000..df29f5c8 --- /dev/null +++ b/backend/api/documents/v3alpha/docmodel/crdt_mvreg.go @@ -0,0 +1,42 @@ +package docmodel + +import "seed/backend/util/btree" + +type mvRegValue[V any] struct { + Value V + Preds []opID +} + +// mvReg is a multi-value register 
CRDT. +type mvReg[V any] struct { + state *btree.Map[opID, mvRegValue[V]] +} + +func newMVReg[V any]() *mvReg[V] { + return &mvReg[V]{ + state: btree.New[opID, mvRegValue[V]](8, opID.Compare), + } +} + +func (s *mvReg[V]) GetLatestOK() (v V, ok bool) { + _, vv, ok := s.state.GetAt(s.state.Len() - 1) + return vv.Value, ok +} + +func (s *mvReg[V]) GetLatest() V { + v, _ := s.GetLatestOK() + return v +} + +func (s *mvReg[V]) GetLatestWithID() (id opID, v V, ok bool) { + id, vv, ok := s.state.GetAt(s.state.Len() - 1) + return id, vv.Value, ok +} + +func (s *mvReg[V]) Set(oid opID, v V) { + preds := s.state.Keys() + s.state.Clear() + if s.state.Set(oid, mvRegValue[V]{Value: v, Preds: preds}) { + panic("BUG: multiple values with the same op id") + } +} diff --git a/backend/api/documents/v3alpha/docmodel/docmodel.go b/backend/api/documents/v3alpha/docmodel/docmodel.go index 686834c3..d6000976 100644 --- a/backend/api/documents/v3alpha/docmodel/docmodel.go +++ b/backend/api/documents/v3alpha/docmodel/docmodel.go @@ -2,23 +2,21 @@ package docmodel import ( "context" - "encoding/json" + "errors" "fmt" + "iter" + "maps" "net/url" "reflect" "seed/backend/blob" "seed/backend/core" documents "seed/backend/genproto/documents/v3alpha" - "seed/backend/hlc" - "seed/backend/util/colx" + "seed/backend/util/cclock" + "slices" "sort" - "strings" - "github.com/ipfs/boxo/blockstore" - blocks "github.com/ipfs/go-block-format" "github.com/ipfs/go-cid" "github.com/multiformats/go-multibase" - "google.golang.org/protobuf/encoding/protojson" "google.golang.org/protobuf/types/known/timestamppb" ) @@ -27,29 +25,22 @@ import ( // Document is a mutable document. type Document struct { - parent *Document - e *Entity - tree *treeCRDT - mut *treeMutation - patch map[string]any - done bool - nextHLC hlc.Timestamp + crdt *docCRDT origins map[string]cid.Cid // map of abbreviated origin hashes to actual cids; workaround, should not be necessary. + + // Bellow goes the data for the ongoing dirty mutation. 
+ // Document can only be mutated once, and then must be thrown away. + + dirty bool + mut *blockTreeMutation + done bool // Index for blocks that we've created in this change. createdBlocks map[string]struct{} // Blocks that we've deleted in this change. deletedBlocks map[string]struct{} -} -func (doc *Document) Parent() *Document { - return doc.parent -} - -func (doc *Document) SetParent(parent *Document) { - if doc.parent != nil { - panic("BUG: parent doc is already set") - } - doc.parent = parent + dirtyBlocks map[string]blob.Block // BlockID => BlockState. + dirtyMetadata map[string]any } // originFromCID creates a CRDT origin from the last 8 chars of the hash. @@ -66,72 +57,93 @@ func originFromCID(c cid.Cid) string { return str[len(str)-9:] } -// New creates a new mutable document. -func New(e *Entity, nextHLC hlc.Timestamp) (*Document, error) { +// New creates a new Document model. +func New(id blob.IRI, clock *cclock.Clock) (*Document, error) { + crdt := newCRDT(id, clock) + return newDoc(crdt) +} + +// newDoc creates a new mutable document. +func newDoc(crdt *docCRDT) (*Document, error) { dm := &Document{ - e: e, - tree: newTreeCRDT(), - patch: map[string]any{}, + crdt: crdt, origins: make(map[string]cid.Cid), createdBlocks: make(map[string]struct{}), deletedBlocks: make(map[string]struct{}), - nextHLC: nextHLC, } - for _, c := range e.cids { + for _, c := range crdt.cids { o := originFromCID(c) dm.origins[o] = c } - if err := dm.replayMoves(); err != nil { + return dm, nil +} + +// Checkout a historical version of the Document. 
+func (dm *Document) Checkout(heads []cid.Cid) (*Document, error) { + if dm.done { + panic("BUG: document is done") + } + + crdt2, err := dm.crdt.Checkout(heads) + if err != nil { return nil, err } - return dm, nil + dm2, err := newDoc(crdt2) + if err != nil { + return nil, err + } + + return dm2, nil } -func (dm *Document) replayMoves() (err error) { - dm.e.State().ForEachListChunk([]string{"moves"}, func(time int64, origin string, items []any) bool { - for idx, move := range items { - mm := move.(map[string]any) - block := mm["b"].(string) - parent := mm["p"].(string) - leftShadow := mm["l"].(string) - left, leftOrigin, _ := strings.Cut(leftShadow, "@") - if left != "" && leftOrigin == "" { - leftOrigin = origin - } - - if err = dm.tree.integrate(newOpID(origin, time, idx), block, parent, left, leftOrigin); err != nil { - err = fmt.Errorf("failed move %v: %w", move, err) - return false - } - } - return true - }) - if err != nil { - return fmt.Errorf("failed to replay previous moves: %w", err) +// ApplyChange to the state. Can only do that before any mutations were made. +func (dm *Document) ApplyChange(c cid.Cid, ch *blob.Change) error { + if dm.dirty { + return fmt.Errorf("cannot apply change to dirty state") } - return nil + return dm.applyChangeUnsafe(c, ch) +} + +func (dm *Document) applyChangeUnsafe(c cid.Cid, ch *blob.Change) error { + o := originFromCID(c) + dm.origins[o] = c + return dm.crdt.ApplyChange(c, ch) } // SetMetadata sets the title of the document. 
-func (dm *Document) SetMetadata(key, value string) error { - v, ok := dm.e.Get("metadata", key) - if ok && v.(string) == value { - return nil +func (dm *Document) SetMetadata(key, newValue string) error { + dm.dirty = true + if dm.dirtyMetadata == nil { + dm.dirtyMetadata = make(map[string]any) + } + + if reg := dm.crdt.stateMetadata[key]; reg != nil { + if newValue == reg.GetLatest() { + // If metadata key already has the same value in the committed CRDT state, + // we do nothing, and just in case clear the dirty metadata value if any. + delete(dm.dirtyMetadata, key) + return nil + } } - colx.ObjectSet(dm.patch, []string{"metadata", key}, value) + dm.dirtyMetadata[key] = newValue return nil } // DeleteBlock deletes a block. func (dm *Document) DeleteBlock(block string) error { - mut := dm.ensureMutation() - me, err := mut.move(block, TrashNodeID, "") + dm.dirty = true + mut, err := dm.ensureTreeMutation() + if err != nil { + return err + } + + me, err := mut.Move(TrashNodeID, block, "") if err != nil { return err } @@ -144,35 +156,50 @@ func (dm *Document) DeleteBlock(block string) error { } // ReplaceBlock replaces a block. -func (dm *Document) ReplaceBlock(blk *documents.Block) error { - if blk.Id == "" { +func (dm *Document) ReplaceBlock(blkpb *documents.Block) error { + dm.dirty = true + if blkpb.Id == "" { return fmt.Errorf("blocks must have ID") } - blockMap, err := blockToMap(blk) + if dm.dirtyBlocks == nil { + dm.dirtyBlocks = make(map[string]blob.Block) + } + + blk, err := BlockFromProto(blkpb) if err != nil { return err } - oldBlock, ok := dm.e.Get("blocks", blk.Id) - if ok && reflect.DeepEqual(oldBlock, blockMap) { - return nil + // Check if CRDT state already has the same value for block. + // If so, we do nothing, and remove any dirty state for this block. 
+ if reg := dm.crdt.stateBlocks[blkpb.Id]; reg != nil { + oldValue, ok := reg.GetLatestOK() + if ok && reflect.DeepEqual(oldValue, blk) { + delete(dm.dirtyBlocks, blkpb.Id) + return nil + } } - colx.ObjectSet(dm.patch, []string{"blocks", blk.Id, "#map"}, blockMap) + dm.dirtyBlocks[blk.ID] = blk return nil } // MoveBlock moves a block. func (dm *Document) MoveBlock(block, parent, left string) error { + dm.dirty = true if parent == TrashNodeID { panic("BUG: use DeleteBlock to delete a block") } - mut := dm.ensureMutation() + mut, err := dm.ensureTreeMutation() + if err != nil { + return err + } - me, err := mut.move(block, parent, left) + // TODO(burdiyan): make the order of parent/block parameters consistent. + me, err := mut.Move(parent, block, left) if err != nil { return err } @@ -188,130 +215,124 @@ func (dm *Document) MoveBlock(block, parent, left string) error { return nil } -func (dm *Document) ensureMutation() *treeMutation { +func (dm *Document) ensureTreeMutation() (*blockTreeMutation, error) { + dm.dirty = true if dm.mut == nil { - dm.mut = dm.tree.mutate() + dm.mut = dm.crdt.tree.State().Mutate() } - return dm.mut + return dm.mut, nil } -// Change creates a change. -func (dm *Document) Change(kp core.KeyPair) (hb blob.Encoded[*blob.Change], err error) { +// SignChange creates a change. +// After this the Document instance must be discarded. The change must be applied to a different state. +func (dm *Document) SignChange(kp core.KeyPair) (hb blob.Encoded[*blob.Change], err error) { // TODO(burdiyan): we should make them reusable. 
if dm.done { return hb, fmt.Errorf("using already committed mutation") } - if dm.nextHLC == 0 { - panic("BUG: next HLC time is zero") - } - dm.done = true - dm.cleanupPatch() - - action := "Update" + ops := dm.cleanupPatch() - if len(dm.patch) == 0 { - dm.patch["isDraft"] = true + hb, err = dm.crdt.prepareChange(dm.crdt.clock.MustNow(), kp, ops) + if err != nil { + return hb, err } - // Make sure to remove the dummy field created in the initial draft change. - if len(dm.patch) > 1 { - delete(dm.patch, "isDraft") + if err := dm.applyChangeUnsafe(hb.CID, hb.Decoded); err != nil { + return hb, err } - return dm.e.CreateChange(action, dm.nextHLC, kp, dm.patch) + return hb, nil } // Ref creates a Ref blob for the current heads. func (dm *Document) Ref(kp core.KeyPair) (ref blob.Encoded[*blob.Ref], err error) { // TODO(hm24): make genesis detection more reliable. - genesis := dm.e.cids[0] + genesis := dm.crdt.cids[0] - if len(dm.e.heads) != 1 { + if len(dm.crdt.heads) != 1 { return ref, fmt.Errorf("TODO: creating refs for multiple heads is not supported yet") } - headCID := dm.e.cids[len(dm.e.cids)-1] - head := dm.e.changes[len(dm.e.cids)-1] - - return blob.NewRef(kp, genesis, dm.e.id, []cid.Cid{headCID}, head.Ts) -} + headCID := dm.crdt.cids[len(dm.crdt.cids)-1] + head := dm.crdt.changes[len(dm.crdt.cids)-1] -// Commit commits a change. 
-func (dm *Document) Commit(ctx context.Context, kp core.KeyPair, bs blockstore.Blockstore) (ebc blob.Encoded[*blob.Change], err error) { - ebc, err = dm.Change(kp) + space, path, err := dm.crdt.id.SpacePath() if err != nil { - return ebc, err + return ref, err } - ebr, err := dm.Ref(kp) - if err != nil { - return ebc, err - } + return blob.NewRef(kp, genesis, space, path, []cid.Cid{headCID}, head.Ts) +} - if err := bs.PutMany(ctx, []blocks.Block{ebc, ebr}); err != nil { - return ebc, err +func (dm *Document) cleanupPatch() []blob.Op { + if !dm.dirty { + return nil } - return ebc, nil -} + var ops []blob.Op -func (dm *Document) cleanupPatch() { - if dm.mut == nil { - return + // TODO(burdiyan): It's important to moves go first, + // because I was stupid enough to implement the block tree CRDT in isolation, + // so it's not aware of any other possible operations. + // Will fix this at some point. + if dm.mut != nil { + for move := range dm.mut.Commit(0, "self") { + ops = append(ops, blob.NewOpMoveBlock(move.Block, move.Parent, move.Ref.String())) + } } - var moves []any - dm.mut.forEachMove(func(block, parent, left, leftOrigin string) bool { - var l string - if left != "" { - l = left + "@" + leftOrigin - } - moves = append(moves, map[string]any{ - "b": block, - "p": parent, - "l": l, - }) - - return true - }) - - // If we have some moves after cleaning up, add them to the patch. - if moves != nil { - dm.patch["moves"] = map[string]any{ - "#list": map[string]any{ - "#ins": moves, - }, - } + metaKeys := slices.Collect(maps.Keys(dm.dirtyMetadata)) + slices.Sort(metaKeys) + + for _, key := range metaKeys { + ops = append(ops, blob.NewOpSetMetadata(key, dm.dirtyMetadata[key])) } // Remove state of those blocks that we created and deleted in the same change. 
for blk := range dm.deletedBlocks { if _, mustIgnore := dm.createdBlocks[blk]; mustIgnore { - colx.ObjectDelete(dm.patch, []string{"blocks", blk}) + delete(dm.dirtyBlocks, blk) continue } } - // Remove the blocks key from the patch if we end up with no blocks after cleanup. - if blocks, ok := dm.patch["blocks"].(map[string]any); ok { - if len(blocks) == 0 { - delete(dm.patch, "blocks") + dirtyBlockIDs := slices.Collect(maps.Keys(dm.dirtyBlocks)) + slices.Sort(dirtyBlockIDs) + for _, bid := range dirtyBlockIDs { + blk, ok := dm.dirtyBlocks[bid] + if !ok { + panic("BUG: dirty block not found") } + + ops = append(ops, blob.NewOpReplaceBlock(blk)) } + + return ops } -// Entity returns the underlying entity. -func (dm *Document) Entity() *Entity { - return dm.e +// NumChanges returns the number of changes in the current state of the document. +func (dm *Document) NumChanges() int { + return len(dm.crdt.cids) +} + +// BFTDeps returns a breadth-first traversal iterator for the document change DAG. +func (dm *Document) BFTDeps(start []cid.Cid) (iter.Seq2[int, blob.ChangeRecord], error) { + return dm.crdt.BFTDeps(start) +} + +// Heads returns the current leaf/head changes in the document history. +// I.e. it's the current version of the document. +func (dm *Document) Heads() map[cid.Cid]struct{} { + return dm.crdt.Heads() } // Hydrate hydrates a document. func (dm *Document) Hydrate(ctx context.Context) (*documents.Document, error) { - if len(dm.e.changes) == 0 { + if len(dm.crdt.changes) == 0 { return nil, fmt.Errorf("no changes in the entity") } @@ -319,36 +340,29 @@ func (dm *Document) Hydrate(ctx context.Context) (*documents.Document, error) { panic("BUG: can't hydrate a document with uncommitted changes") } - e := dm.e + e := dm.crdt first := e.changes[0] last := e.changes[len(e.changes)-1] // TODO(burdiyan): this is ugly and needs to be refactored. 
- u, err := url.Parse(string(e.ID())) + u, err := url.Parse(string(e.id)) if err != nil { return nil, err } - account := u.Host + space := u.Host path := u.Path docpb := &documents.Document{ - Account: account, + Account: space, Path: path, - Metadata: make(map[string]string), - CreateTime: timestamppb.New(hlc.Timestamp(first.Ts).Time()), + Metadata: e.GetMetadata(), + CreateTime: timestamppb.New(first.Ts), Version: e.Version().String(), } - docpb.UpdateTime = timestamppb.New(hlc.Timestamp(last.Ts).Time()) - - for _, key := range e.state.Keys("metadata") { - v, ok := e.state.GetAny("metadata", key).(string) - if ok && v != "" { - docpb.Metadata[key] = v - } - } + docpb.UpdateTime = timestamppb.New(last.Ts) // Loading editors is a bit cumbersome because we need to go over key delegations. { @@ -367,80 +381,108 @@ func (dm *Document) Hydrate(ctx context.Context) (*documents.Document, error) { } blk, ok := blockMap[parent] if !ok { - panic("BUG: no parent " + parent + " was found yet while iterating") + panic("BUG: no parent " + parent + " for child " + child.Block.Id) } blk.Children = append(blk.Children, child) } - dm.tree.mutate().walkDFT(func(m *move) bool { + for pair := range dm.crdt.tree.State().DFT() { // TODO(burdiyan): block revision would change only if block itself was changed. // If block is only moved it's revision won't change. Need to check if that's what we want. - mm, origin, ok := dm.e.State().GetWithOrigin("blocks", m.Block) - if !ok { - // If we got some moves but no block state - // we just skip them, we don't want to blow up here. - return true - } - oo := dm.origins[origin] - // if !oo.Defined() { - // oo = dm.oldDraft - // } + // If we got some moves but no block state + // we just skip them, we don't want to blow up here. 
+ + bs := dm.crdt.stateBlocks[pair.Child] + if bs == nil { + continue + } - var blk *documents.Block - blk, err = blockFromMap(m.Block, oo.String(), mm.(map[string]any)) - if err != nil { - return false + opid, blk, ok := bs.GetLatestWithID() + if !ok { + continue } - child := &documents.BlockNode{Block: blk} - appendChild(m.Parent, child) - blockMap[m.Block] = child + oo := dm.origins[opid.Origin] + blkpb := BlockToProto(blk, oo) - return true - }) - if err != nil { - return nil, err + child := &documents.BlockNode{Block: blkpb} + appendChild(pair.Parent, child) + blockMap[pair.Child] = child } return docpb, nil } -func blockToMap(blk *documents.Block) (map[string]any, error) { - // This is a very bad way to convert something into a map, - // but mapstructure package could have problems here, - // because protobuf have peculiar encoding of oneof fields into JSON, - // which mapstructure doesn't know about. Although in fact we don't have - // any oneof fields in this structure, but just in case. - data, err := protojson.Marshal(blk) - if err != nil { - return nil, err +// BlockFromProto converts a protobuf block into our internal representation. +// It's largely the same, but we need a separate type for CBOR encoding which we use in the permanent data. +func BlockFromProto(b *documents.Block) (blob.Block, error) { + if b.Id == "" { + return blob.Block{}, errors.New("block ID is required") } - var v map[string]any - if err := json.Unmarshal(data, &v); err != nil { - return nil, err + if len(b.Attributes) == 0 { + b.Attributes = nil } - // We don't want those fields, because they can be inferred. 
- delete(v, "revision") - delete(v, "id") + return blob.Block{ + ID: b.Id, + Type: b.Type, + Text: b.Text, + Link: b.Link, + Attributes: b.Attributes, + Annotations: annotationsFromProto(b.Annotations), + }, nil +} + +func annotationsFromProto(in []*documents.Annotation) []blob.Annotation { + if in == nil { + return nil + } - return v, nil + out := make([]blob.Annotation, len(in)) + for i, a := range in { + out[i] = blob.Annotation{ + Type: a.Type, + Link: a.Link, + Attributes: a.Attributes, + Starts: a.Starts, + Ends: a.Ends, + } + } + + return out } -func blockFromMap(id, revision string, v map[string]any) (*documents.Block, error) { - data, err := json.Marshal(v) - if err != nil { - return nil, err +// BlockToProto converts our internal block representation into a protobuf block. +// It's largely the same, but we use CBOR in our permanent data, and we use protobuf in our API. +func BlockToProto(b blob.Block, revision cid.Cid) *documents.Block { + return &documents.Block{ + Id: b.ID, + Type: b.Type, + Text: b.Text, + Link: b.Link, + Attributes: b.Attributes, + Annotations: annotationsToProto(b.Annotations), + Revision: revision.String(), } +} - pb := &documents.Block{} - if err := protojson.Unmarshal(data, pb); err != nil { - return nil, err +func annotationsToProto(in []blob.Annotation) []*documents.Annotation { + if in == nil { + return nil + } + + out := make([]*documents.Annotation, len(in)) + for i, a := range in { + out[i] = &documents.Annotation{ + Type: a.Type, + Link: a.Link, + Attributes: a.Attributes, + Starts: a.Starts, + Ends: a.Ends, + } } - pb.Id = id - pb.Revision = revision - return pb, nil + return out } diff --git a/backend/api/documents/v3alpha/docmodel/docmodel_test.go b/backend/api/documents/v3alpha/docmodel/docmodel_test.go new file mode 100644 index 00000000..e2d95903 --- /dev/null +++ b/backend/api/documents/v3alpha/docmodel/docmodel_test.go @@ -0,0 +1,33 @@ +package docmodel + +import ( + "seed/backend/core/coretest" + 
"seed/backend/util/cclock" + "seed/backend/util/must" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestDocmodelSmoke(t *testing.T) { + alice := coretest.NewTester("alice").Account + + doc := must.Do2(New("mydoc", cclock.New())) + must.Do(doc.SetMetadata("title", "Hello")) + c1 := must.Do2(doc.SignChange(alice)) + + { + doc := must.Do2(New("mydoc", cclock.New())) + must.Do(doc.ApplyChange(c1.CID, c1.Decoded)) + must.Do(doc.SetMetadata("title", "Hello world")) + c2 := must.Do2(doc.SignChange(alice)) + + { + doc := must.Do2(New("mydoc", cclock.New())) + must.Do(doc.ApplyChange(c1.CID, c1.Decoded)) + must.Do(doc.ApplyChange(c2.CID, c2.Decoded)) + + require.Equal(t, map[string]string{"title": "Hello world"}, doc.crdt.GetMetadata()) + } + } +} diff --git a/backend/api/documents/v3alpha/docmodel/lseq/lseq.go b/backend/api/documents/v3alpha/docmodel/lseq/lseq.go new file mode 100644 index 00000000..f3421340 --- /dev/null +++ b/backend/api/documents/v3alpha/docmodel/lseq/lseq.go @@ -0,0 +1,85 @@ +// Package lseq provides a list CRDT with absolute position for elements. +// It's similar to the original LSEQ algorithm, but doesn't cause interleaving of elements. +// In fact, it's more similar to the RGA algorithm, but it generates absolute positions which can be compared lexicographically. +// See this article for more information: https://www.bartoszsypytkowski.com/non-interleaving-lseq. +package lseq + +import ( + "iter" + + "github.com/tidwall/btree" +) + +// Item of an LSEQ list. +type Item[T any] struct { + pos Position + value T +} + +// LSEQ is a list CRDT with absolute positions. +type LSEQ[T any] struct { + items *btree.BTreeG[Item[T]] + hint btree.PathHint +} + +// New creates a new LSEQ list. 
+func New[T any]() *LSEQ[T] { + return &LSEQ[T]{ + items: btree.NewBTreeGOptions( + func(a, b Item[T]) bool { + return a.pos.Cmp(b.pos) < 0 + }, + btree.Options{ + NoLocks: true, + Degree: 8, + }, + ), + } +} + +func (l *LSEQ[T]) maybePosAt(idx int) Position { + if idx < 0 || l.items.Len() == 0 || idx >= l.items.Len() { + return nil + } + + el, _ := l.items.GetAt(idx) + return el.pos +} + +// InsertAt inserts values at the specified index. +func (l *LSEQ[T]) InsertAt(idx int, origin uint64, values ...T) []Position { + if idx < 0 || idx > l.items.Len() { + panic("index out of bounds") + } + + left := l.maybePosAt(idx - 1) + right := l.maybePosAt(idx) + + out := make([]Position, len(values)) + + for i, value := range values { + pos := newPos(origin, left, right) + item := Item[T]{pos: pos, value: value} + l.items.SetHint(item, &l.hint) + left = pos + out[i] = pos + } + + return out +} + +// Values returns an in-order iterator for values. +func (l *LSEQ[T]) Values() iter.Seq[T] { + return func(yield func(T) bool) { + for i := range l.items.Len() { + it, ok := l.items.GetAt(i) + if !ok { + panic("BUG: items not found during iteration") + } + + if !yield(it.value) { + return + } + } + } +} diff --git a/backend/api/documents/v3alpha/docmodel/lseq/lseq_test.go b/backend/api/documents/v3alpha/docmodel/lseq/lseq_test.go new file mode 100644 index 00000000..b6a1f891 --- /dev/null +++ b/backend/api/documents/v3alpha/docmodel/lseq/lseq_test.go @@ -0,0 +1,73 @@ +package lseq + +import ( + "slices" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestLSEQ(t *testing.T) { + a := New[byte]() + b := New[byte]() + alice := uint64(10) + bob := uint64(20) + _ = bob + + a.InsertAt(0, alice, 'H', ' ', '!') + a.InsertAt(1, alice, 'i') + + merge(b, a) + + a.InsertAt(3, alice, 'M', 'o', 'm') + b.InsertAt(3, bob, 'D', 'a', 'd') + + merge(b, a) + merge(a, b) + + require.Equal(t, slices.Collect(a.Values()), slices.Collect(b.Values())) + require.Equal(t, "Hi MomDad!", 
string(slices.Collect(a.Values()))) + + a.InsertAt(a.items.Len(), alice, 'H', 'e', 'y') + + require.True(t, slices.IsSortedFunc(a.keys(), Position.Cmp)) + + want := []Position{ + {{10, 1}}, + {{10, 1}, {10, 1}}, + {{10, 2}}, + {{10, 2}, {10, 1}}, + {{10, 2}, {10, 2}}, + {{10, 2}, {10, 3}}, + {{10, 2}, {20, 1}}, + {{10, 2}, {20, 2}}, + {{10, 2}, {20, 3}}, + {{10, 3}}, + {{10, 4}}, + {{10, 5}}, + {{10, 6}}, + } + require.Equal(t, want, a.keys()) +} + +func merge[T any](dst, src *LSEQ[T]) { + for i := range src.items.Len() { + item, ok := src.items.GetAt(i) + if !ok { + continue + } + dst.items.SetHint(item, &dst.hint) + } +} + +func (l *LSEQ[T]) keys() []Position { + out := make([]Position, l.items.Len()) + for i := range l.items.Len() { + item, ok := l.items.GetAt(i) + if !ok { + continue + } + out[i] = item.pos + } + return out +} diff --git a/backend/api/documents/v3alpha/docmodel/lseq/position.go b/backend/api/documents/v3alpha/docmodel/lseq/position.go new file mode 100644 index 00000000..2f16d088 --- /dev/null +++ b/backend/api/documents/v3alpha/docmodel/lseq/position.go @@ -0,0 +1,126 @@ +package lseq + +import ( + "cmp" + "math" +) + +// Position is an absolute list position. +type Position []Segment + +// Cmp compares the two position. +func (p Position) Cmp(o Position) int { + al := len(p) + bl := len(o) + ml := min(al, bl) + + for i := range ml { + if p[i].Less(o[i]) { + return -1 + } + if p[i].Greater(o[i]) { + return +1 + } + } + + if al == bl { + return 0 + } + + return cmp.Compare(al, bl) +} + +// Segment is a tuple of origin/replica ID and a sequence number. +type Segment [2]uint64 + +// Origin part of the segment. +func (ps Segment) Origin() uint64 { + return ps[0] +} + +// Seq part of the segment. +func (ps Segment) Seq() uint64 { + return ps[1] +} + +// Less compares if this segment is less than the provided one. 
+func (ps Segment) Less(other Segment) bool { + ocmp := cmp.Compare(ps.Origin(), other.Origin()) + if ocmp == 0 { + return ps.Seq() < other.Seq() + } + return ocmp < 0 +} + +// Greater compares if this segment is greater than the provided one. +func (ps Segment) Greater(other Segment) bool { + ocmp := cmp.Compare(ps.Origin(), other.Origin()) + if ocmp == 0 { + return ps.Seq() > other.Seq() + } + return ocmp > 0 +} + +var maxSegment = Segment{math.MaxUint64, math.MaxUint64} + +func newPos(origin uint64, left, right Position) Position { + min := Position{{origin, 0}} + + lo := left + if lo == nil { + lo = min + } + + hi := right + if hi == nil { + hi = Position{maxSegment} + } + + var ( + sequence Position + i int + diffed bool + ) + for i < len(lo) && i < len(hi) { + l := lo[i] + r := hi[i] + n := Segment{l.Origin(), l.Seq() + 1} + + if r.Greater(n) { + if n.Origin() != origin { + sequence = append(sequence, l) + } else { + sequence = append(sequence, n) + diffed = true + break + } + } else { + sequence = append(sequence, l) + } + i++ + } + + minSeg := Segment{origin, 0} + for !diffed { + l := minSeg + if i < len(lo) { + l = lo[i] + } + + r := maxSegment + if i < len(hi) { + r = hi[i] + } + + n := Segment{origin, l.Seq() + 1} + if r.Greater(n) { + sequence = append(sequence, n) + diffed = true + } else { + sequence = append(sequence, l) + } + i++ + } + + return sequence +} diff --git a/backend/api/documents/v3alpha/docmodel/moves.go b/backend/api/documents/v3alpha/docmodel/moves.go deleted file mode 100644 index 738d065e..00000000 --- a/backend/api/documents/v3alpha/docmodel/moves.go +++ /dev/null @@ -1,528 +0,0 @@ -package docmodel - -import ( - "fmt" - - "github.com/tidwall/btree" - "golang.org/x/exp/maps" - "roci.dev/fracdex" -) - -type moveEffect byte - -const ( - moveEffectNone moveEffect = 0 - moveEffectCreated moveEffect = 1 - moveEffectMoved moveEffect = 2 -) - -const TrashNodeID = "◊" - -type opID struct { - Origin string - Ts int64 - Idx int -} - -func 
newOpID(origin string, ts int64, idx int) opID { - return opID{ - Origin: origin, - Ts: ts, - Idx: idx, - } -} - -func (o opID) Less(oo opID) bool { - if o.Ts < oo.Ts { - return true - } - - if o.Ts > oo.Ts { - return false - } - - if o.Origin < oo.Origin { - return true - } - - if o.Origin > oo.Origin { - return false - } - - if o.Idx == oo.Idx { - panic("BUG: duplicate move") - } - - return o.Idx < oo.Idx -} - -type treeCRDT struct { - log *btree.BTreeG[*move] - logHint btree.PathHint - - tree *btree.BTreeG[*move] - treeHint btree.PathHint - - origins map[[2]string]*move -} - -func newTreeCRDT() *treeCRDT { - ts := &treeCRDT{ - log: btree.NewBTreeGOptions((*move).ByID, btree.Options{NoLocks: true, Degree: 8}), - tree: btree.NewBTreeGOptions((*move).ByParentOrder, btree.Options{NoLocks: true, Degree: 8}), - origins: make(map[[2]string]*move), - } - - return ts -} - -func (state *treeCRDT) integrate(opID opID, block, parent, left, leftOrigin string) error { - origin := [2]string{block, opID.Origin} - if _, ok := state.origins[origin]; ok { - return fmt.Errorf("duplicate move operation per block and origin: %s@%s", block, opID.Origin) - } - - if left == TrashNodeID { - return fmt.Errorf("left must not be trash") - } - - if left != "" && leftOrigin == "" { - return fmt.Errorf("leftOrigin must be set if left is set") - } - - // find move in tree by parent and left and left origin - li, ri, err := state.findInsertionPoint(opID, parent, left, leftOrigin) - if err != nil { - return err - } - - idx, err := fracdex.KeyBetween(li, ri) - if err != nil { - return fmt.Errorf("failed to create fracdex between %s and %s: %w", li, ri, err) - } - - op := &move{ - OpID: opID, - Block: block, - Parent: parent, - Left: left, - LeftOrigin: leftOrigin, - Fracdex: idx, - } - - state.log.SetHint(op, &state.logHint) - state.tree.SetHint(op, &state.treeHint) - state.origins[origin] = op - - return nil -} - -func (state *treeCRDT) findInsertionPoint(opID opID, parent, block, origin string) 
(left string, right string, err error) { - pivot := &move{Parent: parent} - - var found *move - - if block == "" && origin == "" { - found = pivot - } - - state.tree.AscendHint(pivot, func(x *move) bool { - if x == pivot { - return true - } - - if x.Parent > parent { - return false - } - - if found == nil { - if x.Parent == parent && x.Block == block && x.OpID.Origin == origin { - found = x - left = x.Fracdex - } - return true - } - - // Following the RGA rules here. - // If item to the right of our initial insertion point is concurrent to our op, - // we skip over it. - - if x.OpID.Less(opID) { - right = x.Fracdex - return false - } - - found = x - left = x.Fracdex - return true - }, &state.treeHint) - - if found == nil { - return "", "", fmt.Errorf("block %s@%s not found under parent %s", block, origin, parent) - } - - return left, right, nil -} - -type treeMutation struct { - tree *btree.BTreeG[*move] - treeHint btree.PathHint - parents map[string]string - originalWinners *btree.Map[string, *move] - dirtyWinners *btree.Map[string, *move] - originalInvisibleMoves map[*move]struct{} - dirtyInvisibleMoves map[*move]struct{} -} - -func (state *treeCRDT) mutate() *treeMutation { - vt := &treeMutation{ - tree: state.tree.Copy(), - parents: make(map[string]string), - originalWinners: btree.NewMap[string, *move](16), - originalInvisibleMoves: make(map[*move]struct{}), - dirtyInvisibleMoves: make(map[*move]struct{}), - } - - state.log.Scan(func(x *move) bool { - lastMove, ok := vt.originalWinners.Get(x.Block) - if ok && x.OpID.Less(lastMove.OpID) { - panic("BUG: unsorted moves") - } - - if vt.isAncestor(x.Block, x.Parent) { - vt.originalInvisibleMoves[x] = struct{}{} - return true - } - - if lastMove != nil { - vt.originalInvisibleMoves[lastMove] = struct{}{} - } - - vt.originalWinners.Set(x.Block, x) - vt.parents[x.Block] = x.Parent - - return true - }) - - vt.dirtyWinners = vt.originalWinners.Copy() - vt.dirtyInvisibleMoves = maps.Clone(vt.originalInvisibleMoves) - - 
return vt -} - -func (mut *treeMutation) isAncestor(a, b string) bool { - c := mut.parents[b] - for { - if c == "" || c == TrashNodeID { - return false - } - - if c == a { - return true - } - - c = mut.parents[c] - } -} - -func (mut *treeMutation) move(block, parent, left string) (moveEffect, error) { - if block == "" { - return moveEffectNone, fmt.Errorf("block must not be empty") - } - - if block == left { - return moveEffectNone, fmt.Errorf("block and left must not be the same") - } - - if left == TrashNodeID { - panic("BUG: trash can't be left") - } - - if parent != "" && left != "" && parent == left { - return moveEffectNone, fmt.Errorf("parent and left must not be the same") - } - - // Check if parent is in the tree. - if parent != "" && parent != TrashNodeID { - if _, ok := mut.parents[parent]; !ok { - return moveEffectNone, fmt.Errorf("desired parent block %s is not in the tree", parent) - } - } - - // Preventing cycles. - if mut.isAncestor(block, parent) { - return moveEffectNone, fmt.Errorf("cycle detected: block %s is ancestor of %s", block, parent) - } - - // Check if the desired left is actually a child of the desired parent. - var currentLeft *move - if left != "" { - leftPos, ok := mut.dirtyWinners.Get(left) - if !ok { - return moveEffectNone, fmt.Errorf("left block %s is not in the tree", left) - } - - if leftPos.Parent != parent { - return moveEffectNone, fmt.Errorf("left block %s is not a child of parent %s", left, parent) - } - - currentLeft = leftPos - } else { - // Sentinel value for the beginning of the sublist. - currentLeft = &move{Parent: parent} - } - - // Checking if our move is actually a move or a create. - var me moveEffect - prevWinner, _ := mut.dirtyWinners.Get(block) - switch { - case prevWinner == nil: - me = moveEffectCreated - case prevWinner != nil: - // When we're moving to trash we don't care about the sibling order. 
- if prevWinner.Parent == TrashNodeID && parent == TrashNodeID { - return moveEffectNone, nil - } - - // If previous move of this block is our own move, we can safely delete it. - // Otherwise we mark the previous move as invisible. - if prevWinner.OpID.Origin == "" { - mut.tree.DeleteHint(prevWinner, &mut.treeHint) - delete(mut.dirtyInvisibleMoves, prevWinner) - delete(mut.originalInvisibleMoves, prevWinner) - } else { - mut.dirtyInvisibleMoves[prevWinner] = struct{}{} - } - - me = moveEffectMoved - default: - panic("BUG: invalid move case") - } - - mut.parents[block] = parent - - var rightIndex string - mut.tree.AscendHint(currentLeft, func(x *move) bool { - if x == currentLeft { - return true - } - - if x.Parent == parent { - rightIndex = x.Fracdex - } - - return false - }, &mut.treeHint) - - newIndex, err := fracdex.KeyBetween(currentLeft.Fracdex, rightIndex) - if err != nil { - return moveEffectNone, fmt.Errorf("failed to create fracdex for move %q %q %q: %w", block, parent, left, err) - } - - // Assemble the move. Preliminary moves don't need CRDT metadata. - m := &move{ - Block: block, - Parent: parent, - Fracdex: newIndex, - } - - // The new move we just created should be invisible if we look at the original state. - mut.originalInvisibleMoves[m] = struct{}{} - mut.tree.SetHint(m, &mut.treeHint) - mut.dirtyWinners.Set(block, m) - - // Maybe do the naive cleanup. We can only do it if we move within the same parent. - original, ok := mut.originalWinners.Get(block) - if !ok && m.Parent == TrashNodeID { - // If we're moving a block to trash, - // and this block didn't exist in the original tree, - // we can just discard this move all together. 
- delete(mut.dirtyInvisibleMoves, m) - delete(mut.originalInvisibleMoves, m) - mut.dirtyWinners.Delete(block) - mut.tree.DeleteHint(m, &mut.treeHint) - - return moveEffectMoved, nil - } - if ok && original.Parent == m.Parent { - currentLeft, currentLeftID := mut.visibleLeftSibling(m, mut.dirtyInvisibleMoves) - - // This assertion is probably not needed. - if _, ok := mut.originalInvisibleMoves[original]; ok { - panic("BUG: original winner is invisible") - } - - // If the visible left sibling of the original position is the same as the new move's left, - // then we know our new move didn't do anything. - originalLeft, originalLeftID := mut.visibleLeftSibling(original, mut.originalInvisibleMoves) - if originalLeft == currentLeft && originalLeftID.Origin == currentLeftID.Origin { - // This move is redundant with the original winner. - // Ignore this new move by making it invisible. - // And restore the original move. - delete(mut.dirtyInvisibleMoves, m) - delete(mut.dirtyInvisibleMoves, original) - delete(mut.originalInvisibleMoves, m) - mut.dirtyWinners.Set(block, original) - mut.tree.DeleteHint(m, &mut.treeHint) - mut.tree.SetHint(original, &mut.treeHint) - return moveEffectMoved, nil - } - } - - return me, nil -} - -func (mut *treeMutation) forEachMove(fn func(block, parent, left, leftOrigin string) bool) { - mut.walkDFT(func(m *move) bool { - // We only care about moves that we touched. - if m.OpID.Origin != "" { - return true - } - - if m.Parent == TrashNodeID { - panic("BUG: cleanup must only walk the visible block tree") - } - - currentLeft, currentLeftID := mut.visibleLeftSibling(m, mut.dirtyInvisibleMoves) - - return fn(m.Block, m.Parent, currentLeft, currentLeftID.Origin) - }) - - // Now walk the deleted blocks. - pivot := &move{Parent: TrashNodeID} - mut.tree.AscendHint(pivot, func(m *move) bool { - if m == pivot || m.Parent != pivot.Parent { - return true - } - - // We only care about our own moves. 
- if m.OpID.Origin != "" { - return true - } - - return fn(m.Block, m.Parent, "", "") - }, &mut.treeHint) -} - -func (mut *treeMutation) commit(origin string, ts int64, state *treeCRDT) (err error) { - if mut.tree == state.tree { - panic("BUG: mutation must not be applied on the same state") - } - - var idx int - mut.forEachMove(func(block, parent, left, leftOrigin string) bool { - // If we have a left but don't have origin, it's our own move, - // so we set it our own origin. - if left != "" && leftOrigin == "" { - leftOrigin = origin - } - - if err := state.integrate(newOpID(origin, ts, idx), block, parent, left, leftOrigin); err != nil { - err = fmt.Errorf("failed to integrate preliminary move (%s, %s, %s@%s): %w", block, parent, left, leftOrigin, err) - return false - } - - idx++ - return true - }) - - return err -} - -// walkDFT walks the visible tree in depth-first order. -func (mut *treeMutation) walkDFT(fn func(m *move) bool) { - var hint btree.PathHint - - pivot := &move{Fracdex: "~"} - - var stack []*move - - addChild := func(block string) { - pivot.Parent = block - mut.tree.DescendHint(pivot, func(x *move) bool { - if x == pivot { - return true - } - - if x.Parent != pivot.Parent { - return false - } - - if _, ok := mut.dirtyInvisibleMoves[x]; ok { - return true - } - - stack = append(stack, x) - - return true - }, &hint) - } - - addChild("") - - for len(stack) > 0 { - i := len(stack) - 1 - x := stack[i] - stack = stack[:i] - - if !fn(x) { - break - } - - addChild(x.Block) - } -} - -func (mut *treeMutation) visibleLeftSibling(m *move, invisible map[*move]struct{}) (blockID string, opid opID) { - mut.tree.DescendHint(m, func(x *move) bool { - if x == m { - return true - } - - if x.Parent != m.Parent { - return false - } - - if _, ok := invisible[x]; ok { - return true - } - - blockID = x.Block - opid = x.OpID - return false - }, &mut.treeHint) - - return blockID, opid -} - -type move struct { - OpID opID - Block string - Parent string - Left string - 
LeftOrigin string - Fracdex string -} - -func (m *move) Index() string { - if m == nil { - return "" - } - - return m.Fracdex -} - -func (m *move) ByParentOrder(mm *move) bool { - if m.Parent == mm.Parent { - if m.Fracdex == mm.Fracdex && m != mm { - panic(fmt.Errorf("BUG: duplicated fracdex within parent %+v %+v", m, mm)) - } - return m.Fracdex < mm.Fracdex - } - - return m.Parent < mm.Parent -} - -func (m *move) ByID(mm *move) bool { - return m.OpID.Less(mm.OpID) -} diff --git a/backend/api/documents/v3alpha/docmodel/moves_test.go b/backend/api/documents/v3alpha/docmodel/moves_test.go deleted file mode 100644 index ffd8fbe1..00000000 --- a/backend/api/documents/v3alpha/docmodel/moves_test.go +++ /dev/null @@ -1,298 +0,0 @@ -package docmodel - -import ( - "fmt" - "io" - "os" - "strings" - "testing" - - "github.com/stretchr/testify/require" - "github.com/tidwall/btree" -) - -func TestIneffectualNestedMoves(t *testing.T) { - state := newTreeCRDT() - - { - mut := state.mutate() - doMove(t, mut, moveEffectCreated, "b1", "", "") - doMove(t, mut, moveEffectCreated, "b2", "", "b1") - doMove(t, mut, moveEffectCreated, "b3", "", "b2") - require.NoError(t, mut.commit("alice-1", 1, state)) - } - - { - mut := state.mutate() - checkTree(t, mut, [][2]string{ - {"", "b1"}, - {"", "b2"}, - {"", "b3"}, - }) - - doMove(t, mut, moveEffectMoved, "b2", "b1", "") - doMove(t, mut, moveEffectMoved, "b3", "b1", "b2") - - checkTree(t, mut, [][2]string{ - {"", "b1"}, - {"b1", "b2"}, - {"b1", "b3"}, - }) - require.Equal(t, []dirtyMove{ - {"b2", "b1", "", ""}, - {"b3", "b1", "b2", ""}, - }, mut.dirtyMoves()) - - doMove(t, mut, moveEffectMoved, "b2", "", "b1") - doMove(t, mut, moveEffectMoved, "b3", "", "b2") - - checkTree(t, mut, [][2]string{ - {"", "b1"}, - {"", "b2"}, - {"", "b3"}, - }) - require.Len(t, mut.dirtyMoves(), 0, "ineffectual moves must not be generated") - } -} - -func TestIneffectualMoveRestore(t *testing.T) { - state := newTreeCRDT() - - { - mut := state.mutate() - doMove(t, 
mut, moveEffectCreated, "b1", "", "") - doMove(t, mut, moveEffectCreated, "b2", "", "b1") - doMove(t, mut, moveEffectCreated, "b3", "", "b2") - doMove(t, mut, moveEffectCreated, "b4", "", "b3") - require.NoError(t, mut.commit("alice-1", 1, state)) - } - - { - mut := state.mutate() - doMove(t, mut, moveEffectMoved, "b2", "", "") - doMove(t, mut, moveEffectMoved, "b2", "", "b4") - doMove(t, mut, moveEffectMoved, "b2", "", "b3") - doMove(t, mut, moveEffectMoved, "b2", "", "b3") - doMove(t, mut, moveEffectMoved, "b2", "", "") - doMove(t, mut, moveEffectMoved, "b2", "", "b1") - - checkTree(t, mut, [][2]string{ - {"", "b1"}, - {"", "b2"}, - {"", "b3"}, - {"", "b4"}, - }) - - require.Len(t, mut.dirtyMoves(), 0, "ineffectual moves must not be generated") - } -} - -func TestMoveSelection(t *testing.T) { - state := newTreeCRDT() - - { - mut := state.mutate() - doMove(t, mut, moveEffectCreated, "b1", "", "") - doMove(t, mut, moveEffectCreated, "b2", "", "b1") - doMove(t, mut, moveEffectCreated, "b3", "", "b2") - doMove(t, mut, moveEffectCreated, "b4", "", "b3") - require.NoError(t, mut.commit("alice-1", 1, state)) - } - - checkTree(t, state.mutate(), [][2]string{ - {"", "b1"}, - {"", "b2"}, - {"", "b3"}, - {"", "b4"}, - }) - - { - mut := state.mutate() - doMove(t, mut, moveEffectMoved, "b2", "", "") - doMove(t, mut, moveEffectMoved, "b3", "", "b2") - doMove(t, mut, moveEffectMoved, "b4", "", "b3") - checkTree(t, mut, [][2]string{ - {"", "b2"}, - {"", "b3"}, - {"", "b4"}, - {"", "b1"}, - }) - - wantMoves := []dirtyMove{ - {"b2", "", "", ""}, - {"b3", "", "b2", ""}, - {"b4", "", "b3", ""}, - } - require.Equal(t, wantMoves, mut.dirtyMoves()) - require.NoError(t, mut.commit("alice-2", 2, state)) - } - - checkTree(t, state.mutate(), [][2]string{ - {"", "b2"}, - {"", "b3"}, - {"", "b4"}, - {"", "b1"}, - }, "state after commit must be the same") -} - -func TestRedundantMoveMutation(t *testing.T) { - state := newTreeCRDT() - - move := func(mut *treeMutation, want moveEffect, a, b, c 
string) { - t.Helper() - got, err := mut.move(a, b, c) - require.NoError(t, err) - require.Equal(t, want, got) - } - - { - mut := state.mutate() - move(mut, moveEffectCreated, "b1", "", "") - move(mut, moveEffectCreated, "b2", "", "b1") - move(mut, moveEffectCreated, "b3", "", "b2") - move(mut, moveEffectMoved, "b2", "", "b1") - move(mut, moveEffectMoved, "b2", "", "b1") - require.NoError(t, mut.commit("alice", 1, state)) - } - - alice := state.mutate() - move(alice, moveEffectMoved, "b1", "b2", "") - move(alice, moveEffectMoved, "b3", "b2", "b1") - - bob := state.mutate() - move(bob, moveEffectMoved, "b2", "b1", "") - move(bob, moveEffectMoved, "b3", "b1", "b2") - move(bob, moveEffectCreated, "b4", "b1", "") - move(bob, moveEffectMoved, "b4", TrashNodeID, "") - - require.NoError(t, alice.commit("alice-1", 2, state)) - require.NoError(t, bob.commit("bob", 2, state)) - - checkTree(t, state.mutate(), [][2]string{ - {"", "b2"}, - {"b2", "b1"}, - {"b1", "b3"}, - }) -} - -func TestTreeState(t *testing.T) { - ts := newTreeCRDT() - - require.NoError(t, ts.integrate(newOpID("a", 1, 0), "b1", "", "", "")) - require.NoError(t, ts.integrate(newOpID("a", 2, 0), "b2", "", "b1", "a")) - require.NoError(t, ts.integrate(newOpID("b", 3, 0), "b1", "", "b2", "a")) -} - -func TestVisibleTree(t *testing.T) { - state := newTreeCRDT() - - require.NoError(t, state.integrate(newOpID("a", 1, 0), "b1", "", "", "")) - require.NoError(t, state.integrate(newOpID("a", 1, 1), "b2", "", "b1", "a")) - require.NoError(t, state.integrate(newOpID("a", 1, 2), "b3", "", "b2", "a")) - - // Concurrent conflicting changes. 
- - require.NoError(t, state.integrate(newOpID("b", 2, 0), "b1", "b2", "", "")) - require.NoError(t, state.integrate(newOpID("b", 2, 1), "b3", "b2", "b1", "b")) - - require.True(t, state.mutate().isAncestor("b2", "b1")) - - require.NoError(t, state.integrate(newOpID("c", 2, 0), "b2", "b1", "", "")) - require.NoError(t, state.integrate(newOpID("c", 2, 1), "b3", "b1", "b2", "c")) - - checkTree(t, state.mutate(), [][2]string{ - {"", "b2"}, - {"b2", "b1"}, - {"b1", "b3"}, - }) -} - -type dirtyMove struct { - Block string - Parent string - Left string - LeftOrigin string -} - -func (mut *treeMutation) dirtyMoves() []dirtyMove { - var out []dirtyMove - mut.forEachMove(func(block, parent, left, leftOrigin string) bool { - out = append(out, dirtyMove{ - Block: block, - Parent: parent, - Left: left, - LeftOrigin: leftOrigin, - }) - return true - }) - - return out -} - -func (mut *treeMutation) dump(w io.Writer) { - if w == nil { - w = os.Stdout - } - - var hint btree.PathHint - - pivot := &move{Fracdex: "~"} - - var stack []*move - - addChild := func(block string) { - pivot.Parent = block - mut.tree.DescendHint(pivot, func(x *move) bool { - if x == pivot { - return true - } - - if x.Parent != pivot.Parent { - return false - } - - if _, ok := mut.dirtyInvisibleMoves[x]; ok { - return true - } - - stack = append(stack, x) - - return true - }, &hint) - } - - addChild("") - - for len(stack) > 0 { - i := len(stack) - 1 - x := stack[i] - stack = stack[:i] - - fmt.Fprintln(w, x.Parent, x.Block) - - addChild(x.Block) - } -} - -func doMove(t *testing.T, mut *treeMutation, want moveEffect, block, parent, left string) { - t.Helper() - got, err := mut.move(block, parent, left) - require.NoError(t, err) - require.Equal(t, want, got) -} - -func checkTree(t *testing.T, mut *treeMutation, want [][2]string, vv ...any) { - t.Helper() - - var wb strings.Builder - for _, x := range want { - wb.WriteString(x[0]) - wb.WriteByte(' ') - wb.WriteString(x[1]) - wb.WriteByte('\n') - } - - var b 
strings.Builder - mut.dump(&b) - require.Equal(t, wb.String(), b.String(), vv...) -} diff --git a/backend/api/documents/v3alpha/documents.go b/backend/api/documents/v3alpha/documents.go index 1df42d77..12d9d267 100644 --- a/backend/api/documents/v3alpha/documents.go +++ b/backend/api/documents/v3alpha/documents.go @@ -11,8 +11,8 @@ import ( "seed/backend/blob" "seed/backend/core" documents "seed/backend/genproto/documents/v3alpha" - "seed/backend/hlc" "seed/backend/util/apiutil" + "seed/backend/util/cclock" "seed/backend/util/dqb" "seed/backend/util/errutil" "seed/backend/util/sqlite" @@ -128,9 +128,9 @@ func (srv *Server) CreateDocumentChange(ctx context.Context, in *documents.Creat if in.BaseVersion == "" { switch { // No base version is allowed for home documents with 1 change (which is the auto-generated genesis change). - case in.Path == "" && doc.Entity().NumChanges() == 1: + case in.Path == "" && doc.NumChanges() == 1: // No base version is allowed for newly created documents, i.e. when there's not changes applied yet. - case in.Path != "" && doc.Entity().NumChanges() == 0: + case in.Path != "" && doc.NumChanges() == 0: // Otherwise it's an error to not provide a base version. 
default: return nil, status.Errorf(codes.InvalidArgument, "base_version is required for updating existing documents") @@ -143,9 +143,9 @@ func (srv *Server) CreateDocumentChange(ctx context.Context, in *documents.Creat var newBlobs []blocks.Block - docChange, err := doc.Change(kp) + docChange, err := doc.SignChange(kp) if err != nil { - return nil, fmt.Errorf("failed to create subdoc change: %w", err) + return nil, fmt.Errorf("failed to create document change: %w", err) } newBlobs = append(newBlobs, docChange) @@ -359,7 +359,7 @@ func (srv *Server) DeleteDocument(ctx context.Context, in *documents.DeleteDocum } func (srv *Server) ensureProfileGenesis(ctx context.Context, kp core.KeyPair) error { - ebc, err := blob.NewChange(kp, nil, "Create", nil, blob.ProfileGenesisEpoch) + ebc, err := blob.NewChange(kp, cid.Undef, nil, 0, nil, blob.ZeroUnixTime()) if err != nil { return err } @@ -369,7 +369,12 @@ func (srv *Server) ensureProfileGenesis(ctx context.Context, kp core.KeyPair) er return err } - ebr, err := blob.NewRef(kp, ebc.CID, iri, []cid.Cid{ebc.CID}, blob.ProfileGenesisEpoch) + space, path, err := iri.SpacePath() + if err != nil { + return err + } + + ebr, err := blob.NewRef(kp, ebc.CID, space, path, []cid.Cid{ebc.CID}, blob.ZeroUnixTime()) if err != nil { return err } @@ -391,13 +396,16 @@ func (srv *Server) loadDocument(ctx context.Context, account core.Principal, pat return nil, err } - clock := hlc.NewClock() - entity := docmodel.NewEntityWithClock(iri, clock) + clock := cclock.New() + doc, err := docmodel.New(iri, clock) + if err != nil { + return nil, err + } var outErr error changes, check := srv.idx.IterChanges(ctx, iri, account) for _, ch := range changes { - if err := entity.ApplyChange(ch); err != nil { + if err := doc.ApplyChange(ch.CID, ch.Data); err != nil { outErr = errors.Join(outErr, err) break } @@ -407,7 +415,7 @@ func (srv *Server) loadDocument(ctx context.Context, account core.Principal, pat return nil, outErr } - if !ensurePath && 
len(entity.Heads()) == 0 { + if !ensurePath && len(doc.Heads()) == 0 { return nil, status.Errorf(codes.NotFound, "document not found: %s", iri) } @@ -417,18 +425,13 @@ func (srv *Server) loadDocument(ctx context.Context, account core.Principal, pat return nil, err } - entity, err = entity.Checkout(heads) + doc, err = doc.Checkout(heads) if err != nil { - return nil, fmt.Errorf("failed to checkout version %s", version) + return nil, fmt.Errorf("failed to checkout version: %w", err) } } - doc, err := docmodel.New(entity, clock.MustNow()) - if err != nil { - return nil, err - } - - return doc, nil + return doc, err } func applyChanges(doc *docmodel.Document, ops []*documents.DocumentChange) error { @@ -477,7 +480,7 @@ func (srv *Server) checkWriteAccess(ctx context.Context, account core.Principal, return err } - if !cpb.Account.Equal(account) { + if !cpb.Space.Equal(account) { return status.Errorf(codes.PermissionDenied, "capability %s is not from account %s", capc, account) } @@ -485,7 +488,7 @@ func (srv *Server) checkWriteAccess(ctx context.Context, account core.Principal, return status.Errorf(codes.PermissionDenied, "capability %s is not delegated to key %s", capc, kp.Principal()) } - grantedIRI, err := makeIRI(cpb.Account, cpb.Path) + grantedIRI, err := makeIRI(cpb.Space, cpb.Path) if err != nil { return err } diff --git a/backend/blob/blob.go b/backend/blob/blob.go new file mode 100644 index 00000000..4d8e0583 --- /dev/null +++ b/backend/blob/blob.go @@ -0,0 +1,71 @@ +// Package blob defines our core blob types for the permanent data layer. +package blob + +import ( + "seed/backend/util/cclock" + "time" + + cbornode "github.com/ipfs/go-ipld-cbor" + "github.com/polydawn/refmt/obj/atlas" +) + +// ClockPrecision is the default precision we use for our timestamps in permanent data. +// It corresponds to the precision in the cclock package. +// This must be the same as the precision used in the encoder/decoder transformation below.
+const ClockPrecision = cclock.DefaultPrecision + +func init() { + // Encode timestamps as Unix milliseconds. Should be enough precision. + cbornode.RegisterCborType(atlas.BuildEntry(time.Time{}). + Transform(). + TransformMarshal(atlas.MakeMarshalTransformFunc(func(t time.Time) (int64, error) { + if !t.Equal(t.Round(ClockPrecision)) { + panic("BUG: trying to encoded a non-rounded time.Time") + } + + return t.UnixMilli(), nil + })). + TransformUnmarshal(atlas.MakeUnmarshalTransformFunc(func(in int64) (time.Time, error) { + return time.UnixMilli(in), nil + })). + Complete(), + ) +} + +var unixZero = time.Unix(0, 0).UTC().Round(ClockPrecision) + +// ZeroUnixTime returns a zero timestamp. +// We use it whenever we need determinism in data that has timestamps. +// Namely, we use it to create a sentinel genesis Change for all the Account/Space home documents. +func ZeroUnixTime() time.Time { + return unixZero +} + +// CBORToMap converts a CBOR object to a map. +// TODO(burdiyan): This is a workaround. Should not exist. +func CBORToMap(v any) map[string]any { + data, err := cbornode.DumpObject(v) + if err != nil { + panic(err) + } + + var m map[string]any + if err := cbornode.DecodeInto(data, &m); err != nil { + panic(err) + } + + return m +} + +// MapToCBOR converts a map to a CBOR object. +// TODO(burdiyan): This is a workaround. Should not exist. +func MapToCBOR(data map[string]any, v any) { + rawData, err := cbornode.DumpObject(data) + if err != nil { + panic(err) + } + + if err := cbornode.DecodeInto(rawData, v); err != nil { + panic(err) + } +} diff --git a/backend/blob/blob_capability.go b/backend/blob/blob_capability.go index 5fae5c8e..1b477f1a 100644 --- a/backend/blob/blob_capability.go +++ b/backend/blob/blob_capability.go @@ -19,28 +19,31 @@ func init() { cbornode.RegisterCborType(CapabilityUnsigned{}) } +// Capability is a blob that represents some granted rights from the issuer to the delegate key. 
type Capability struct { CapabilityUnsigned Sig core.Signature `refmt:"sig,omitempty"` } +// CapabilityUnsigned holds the fields of a Capability that are meant to be signed. type CapabilityUnsigned struct { - Type blobType `refmt:"@type"` + Type blobType `refmt:"type"` Issuer core.Principal `refmt:"issuer"` Delegate core.Principal `refmt:"delegate"` - Account core.Principal `refmt:"account"` + Space core.Principal `refmt:"space"` Path string `refmt:"path,omitempty"` Role string `refmt:"role"` - Ts int64 `refmt:"ts"` + Ts time.Time `refmt:"ts"` NoRecursive bool `refmt:"noRecursive,omitempty"` } -func NewCapability(issuer core.KeyPair, delegate, account core.Principal, path string, role string, ts int64, noRecursive bool) (eb Encoded[*Capability], err error) { +// NewCapability creates a new Capability blob. +func NewCapability(issuer core.KeyPair, delegate, space core.Principal, path string, role string, ts time.Time, noRecursive bool) (eb Encoded[*Capability], err error) { cu := CapabilityUnsigned{ Type: blobTypeCapability, Issuer: issuer.Principal(), Delegate: delegate, - Account: account, + Space: space, Path: path, Role: role, Ts: ts, @@ -55,6 +58,7 @@ func NewCapability(issuer core.KeyPair, delegate, account core.Principal, path s return encodeBlob(cc) } +// Sign signs the Capability with the given key pair. 
func (c CapabilityUnsigned) Sign(kp core.KeyPair) (cc *Capability, err error) { if !kp.Principal().Equal(c.Issuer) { return cc, fmt.Errorf("signing key %s must be equal to issuer %s", kp.Principal(), c.Issuer) @@ -99,12 +103,12 @@ func init() { } func indexCapability(ictx *indexingCtx, id int64, c cid.Cid, v *Capability) error { - iri, err := NewIRI(v.Account, v.Path) + iri, err := NewIRI(v.Space, v.Path) if err != nil { return err } - sb := newStructuralBlob(c, string(blobTypeCapability), v.Issuer, time.UnixMicro(v.Ts), iri, cid.Undef, v.Account, time.Time{}) + sb := newStructuralBlob(c, string(blobTypeCapability), v.Issuer, v.Ts, iri, cid.Undef, v.Space, time.Time{}) if _, err := ictx.ensurePubKey(v.Issuer); err != nil { return err diff --git a/backend/blob/blob_change.go b/backend/blob/blob_change.go index 38bb00db..e82ee3fa 100644 --- a/backend/blob/blob_change.go +++ b/backend/blob/blob_change.go @@ -2,42 +2,86 @@ package blob import ( "bytes" - "encoding/json" + "cmp" + "encoding/binary" "fmt" "net/url" "seed/backend/core" - documents "seed/backend/genproto/documents/v3alpha" - "seed/backend/hlc" "seed/backend/ipfs" - "seed/backend/util/must" "time" "github.com/ipfs/go-cid" cbornode "github.com/ipfs/go-ipld-cbor" "github.com/multiformats/go-multicodec" - "google.golang.org/protobuf/encoding/protojson" ) -var ProfileGenesisEpoch = must.Do2(time.ParseInLocation(time.RFC3339, "2024-01-01T00:00:00Z", time.UTC)).UnixMicro() - func init() { cbornode.RegisterCborType(Change{}) cbornode.RegisterCborType(ChangeUnsigned{}) + cbornode.RegisterCborType(Op{}) } const blobTypeChange blobType = "Change" +// OpType is a type for operation types. +type OpType string + +// Op is an atom of our op-based CRDT structure. +type Op struct { + Type OpType `refmt:"type"` + Data map[string]any `refmt:"data,omitempty"` +} + +// Supported op types. +const ( + OpSetMetadata OpType = "SetMetadata" // Args = key => value. 
+ OpMoveBlock OpType = "MoveBlock" // Args = block, parent, left+origin. + OpReplaceBlock OpType = "ReplaceBlock" // Args = id => block data. +) + +// NewOpSetMetadata creates a SetMetadata op. +func NewOpSetMetadata(key string, value any) Op { + return Op{ + Type: OpSetMetadata, + Data: map[string]any{key: value}, // TODO(burdiyan): or key => key, value => value? + } +} + +// NewOpMoveBlock creates a MoveBlock op. +func NewOpMoveBlock(block, parent, leftOrigin string) Op { + return Op{ + Type: OpMoveBlock, + Data: map[string]any{ + "block": block, + "parent": parent, + "leftOrigin": leftOrigin, + }, + } +} + +// NewOpReplaceBlock creates a ReplaceBlock op. +func NewOpReplaceBlock(state Block) Op { + return Op{ + Type: OpReplaceBlock, + Data: CBORToMap(state), + } +} + +// Change is an atomic change to a document. +// The linked DAG of Changes represents the state of a document over time. type Change struct { ChangeUnsigned Sig core.Signature `refmt:"sig,omitempty"` } -func NewChange(kp core.KeyPair, deps []cid.Cid, action string, payload map[string]any, ts int64) (eb Encoded[*Change], err error) { +// NewChange creates a new Change. +func NewChange(kp core.KeyPair, genesis cid.Cid, deps []cid.Cid, depth int, ops []Op, ts time.Time) (eb Encoded[*Change], err error) { cu := ChangeUnsigned{ Type: blobTypeChange, + Genesis: genesis, Deps: deps, - Action: action, - Payload: payload, + Depth: depth, + Ops: ops, Author: kp.Principal(), Ts: ts, } @@ -50,15 +94,18 @@ func NewChange(kp core.KeyPair, deps []cid.Cid, action string, payload map[strin return encodeBlob(cc) } +// ChangeUnsigned holds the fields of a Change that are supposed to be signed. 
type ChangeUnsigned struct { - Type blobType `refmt:"@type"` + Type blobType `refmt:"type"` + Genesis cid.Cid `refmt:"genesis,omitempty"` Deps []cid.Cid `refmt:"deps,omitempty"` - Action string `refmt:"action"` - Payload map[string]any `refmt:"payload"` + Depth int `refmt:"depth,omitempty"` + Ops []Op `refmt:"ops,omitempty"` Author core.Principal `refmt:"author"` - Ts int64 `refmt:"ts"` + Ts time.Time `refmt:"ts"` } +// Sign the change with the provided key pair. func (c *ChangeUnsigned) Sign(kp core.KeyPair) (cc *Change, err error) { if !c.Author.Equal(kp.Principal()) { return nil, fmt.Errorf("author mismatch when signing") @@ -80,6 +127,109 @@ func (c *ChangeUnsigned) Sign(kp core.KeyPair) (cc *Change, err error) { }, nil } +type OpID struct { + Ts uint64 + Idx uint32 + Origin uint64 +} + +const ( + maxTimestamp = 1<<48 - 1 + maxIdx = 1<<24 - 1 + maxOrigin = 1<<48 - 1 +) + +func newOpID(ts uint64, idx uint32, origin uint64) OpID { + if ts >= maxTimestamp { + panic("BUG: operation timestamp is too large") + } + + if idx >= maxIdx { + panic("BUG: operation index is too large") + } + + if origin >= maxOrigin { + panic("BUG: operation origin is too large") + } + + return OpID{ + Ts: ts, + Origin: origin, + Idx: idx, + } +} + +func (o OpID) Compare(oo OpID) int { + if o.Ts < oo.Ts { + return -1 + } + + if o.Ts > oo.Ts { + return +1 + } + + if o.Idx < oo.Idx { + return -1 + } + + if o.Idx > oo.Idx { + return +1 + } + + return cmp.Compare(o.Origin, oo.Origin) +} + +func (op OpID) Encode() EncodedOpID { + var ( + e EncodedOpID + scratch [8]byte + ) + + binary.BigEndian.PutUint64(scratch[:], uint64(op.Ts)) + copy(e[:6], scratch[2:]) + + binary.BigEndian.PutUint32(scratch[:], op.Idx) + copy(e[6:6+3], scratch[1:]) + + binary.BigEndian.PutUint64(scratch[:], op.Origin) + copy(e[9:], scratch[2:]) + + return e +} + +// EncodedOpID is a CRDT Op ID that is compactly encoded in the following way: +// - 6 bytes (48 bits): timestamp. 
Enough precision to track Unix millisecond timestamps for thousands of years.
+// - 3 bytes (24 bits): index/offset of the operation within the same Change/Transaction.
+// - 6 bytes (48 bits): origin/replica/actor. Random 48-bit value of a replica that generated the operation.
+// The timestamp and index are big-endian, to support lexicographic ordering of the IDs.
+// This has some limitations:
+// 1. Maximum number of operations in a single change is 16777215.
+// 2. Same actor must not generate more than one Change/Transaction within the same millisecond.
+// 3. The clocks on the devices generating the operations must be roughly synchronized to avoid inter-device conflicts in timestamps.
+type EncodedOpID [15]byte
+
+func (e EncodedOpID) Decode() OpID {
+	var (
+		out     OpID
+		scratch [8]byte
+	)
+
+	copy(scratch[2:], e[:6])
+	scratch[0] = 0
+	scratch[1] = 0
+	out.Ts = binary.BigEndian.Uint64(scratch[:])
+
+	copy(scratch[1:], e[6:6+3])
+	out.Idx = binary.BigEndian.Uint32(scratch[:5])
+
+	copy(scratch[2:], e[9:])
+	scratch[0] = 0
+	scratch[1] = 0
+	out.Origin = binary.BigEndian.Uint64(scratch[:])
+
+	return out
+}
+
 func init() {
 	matcher := makeCBORTypeMatch(blobTypeChange)
 	registerIndexer(blobTypeChange,
@@ -105,13 +255,24 @@ func indexChange(ictx *indexingCtx, id int64, c cid.Cid, v *Change) error {
 	author := v.Author
 
+	switch {
+	case v.Genesis.Defined() && len(v.Deps) > 0 && v.Depth > 0:
+		// Non-genesis change.
+	case !v.Genesis.Defined() && len(v.Deps) == 0 && v.Depth == 0:
+		// Genesis change.
+	default:
+		// Everything else is invalid.
+		return fmt.Errorf("invalid change causality invariants: cid=%s genesis=%s deps=%v depth=%v", c, v.Genesis, v.Deps, v.Depth)
+	}
+
 	var sb StructuralBlob
 	{
 		var resourceTime time.Time
-		if v.Action == "Create" {
-			resourceTime = hlc.Timestamp(v.Ts).Time()
+		// Change with no deps is the genesis change.
+ if len(v.Deps) == 0 { + resourceTime = v.Ts } - sb = newStructuralBlob(c, string(blobTypeChange), author, hlc.Timestamp(v.Ts).Time(), "", cid.Undef, author, resourceTime) + sb = newStructuralBlob(c, string(blobTypeChange), author, v.Ts, "", v.Genesis, author, resourceTime) } // TODO(burdiyan): ensure deps are indexed, not just known. @@ -124,114 +285,84 @@ func indexChange(ictx *indexingCtx, id int64, c cid.Cid, v *Change) error { sb.AddBlobLink("change/dep", dep) } - // TODO(burdiyan): remove this when all the tests are fixed. Sometimes CBOR codec decodes into - // different types than what was encoded, and we might not have accounted for that during indexing. - // So we re-encode the patch here to make sure. - // This is of course very wasteful. - // EDIT: actually re-encoding is probably not a bad idea to enforce the canonical encoding, and hash correctness. - // But it would probably need to happen in some other layer, and more generalized. - { - data, err := cbornode.DumpObject(v.Payload) - if err != nil { - return err - } - v.Payload = nil - - if err := cbornode.DecodeInto(data, &v.Payload); err != nil { - return err - } + var meta struct { + Title string `json:"title"` } + for _, op := range v.Ops { + switch op.Type { + case OpSetMetadata: + for k, v := range op.Data { + vs, ok := v.(string) + if !ok { + continue + } - if v.Payload["metadata"] != nil { - for k, v := range v.Payload["metadata"].(map[string]any) { - vs, ok := v.(string) - if !ok { - continue - } - - u, err := url.Parse(vs) - if err != nil { - continue - } - - if u.Scheme != "ipfs" { - continue - } + if meta.Title == "" && (k == "title" || k == "name" || k == "alias") { + meta.Title = vs + } - c, err := cid.Decode(u.Host) - if err != nil { - continue - } + u, err := url.Parse(vs) + if err != nil { + continue + } - sb.AddBlobLink("metadata/"+k, c) + if u.Scheme != "ipfs" { + continue + } - // TODO(hm24): index other relevant metadata for list response and so on. 
- } - } + c, err := cid.Decode(u.Host) + if err != nil { + continue + } - blocks, ok := v.Payload["blocks"].(map[string]any) - if ok { - for id, blk := range blocks { - v, ok := blk.(map[string]any)["#map"] - if !ok { - continue + sb.AddBlobLink("metadata/"+k, c) + // TODO(hm24): index other relevant metadata for list response and so on. } - // This is a very bad way to convert an opaque map into a block struct. - // TODO(burdiyan): we should do better than this. This is ugly as hell. - data, err := json.Marshal(v) + case OpReplaceBlock: + rawBlock, err := cbornode.DumpObject(op.Data) if err != nil { - return err + return fmt.Errorf("bad data?: failed to encode block into cbor when indexing: %w", err) } - blk := &documents.Block{} - if err := protojson.Unmarshal(data, blk); err != nil { - return err + + var blk Block + if err := cbornode.DecodeInto(rawBlock, &blk); err != nil { + return fmt.Errorf("bad data?: failed to decode cbor block: %w", err) } - blk.Id = id - blk.Revision = c.String() - if err := indexURL(&sb, ictx.log, blk.Id, "doc/"+blk.Type, blk.Ref); err != nil { + + if err := indexURL(&sb, ictx.log, blk.ID, "doc/"+blk.Type, blk.Link); err != nil { return err } for _, ann := range blk.Annotations { - if err := indexURL(&sb, ictx.log, blk.Id, "doc/"+ann.Type, ann.Ref); err != nil { + if err := indexURL(&sb, ictx.log, blk.ID, "doc/"+ann.Type, ann.Link); err != nil { return err } } } } - index, ok := v.Payload["index"].(map[string]any) - if ok { - for key, v := range index { - heads, ok := v.([]cid.Cid) - if !ok { - continue - } - for _, head := range heads { - sb.AddBlobLink("index/"+key, head) - } - } - } - type meta struct { - Title string `json:"title"` + if meta.Title != "" { + sb.Meta = meta } - attrs, ok := v.Payload["metadata"].(map[string]any) - if ok { - title, ok := attrs["title"] - if !ok { - alias, ok := attrs["alias"] - if ok { - sb.Meta = meta{Title: alias.(string)} - } else { - name, ok := attrs["name"] - if ok { - sb.Meta = meta{Title: 
name.(string)} - } - } - } else { - sb.Meta = meta{Title: title.(string)} - } - } return ictx.SaveBlob(id, sb) } + +// Block is a block of text with annotations. +type Block struct { + ID string `refmt:"id,omitempty"` // Omitempty when used in Documents. + Type string `refmt:"type,omitempty"` + Text string `refmt:"text,omitempty"` + Link string `refmt:"link,omitempty"` + Attributes map[string]string `refmt:"attributes,omitempty"` + Annotations []Annotation `refmt:"annotations,omitempty"` +} + +// Annotation is a range of text that has a type and attributes. +type Annotation struct { + Type string `refmt:"type"` + Link string `refmt:"link,omitempty"` + Attributes map[string]string `refmt:"attributes,omitempty"` + Starts []int32 `refmt:"starts,omitempty"` + Ends []int32 `refmt:"ends,omitempty"` +} diff --git a/backend/blob/blob_change_test.go b/backend/blob/blob_change_test.go new file mode 100644 index 00000000..2908e4c9 --- /dev/null +++ b/backend/blob/blob_change_test.go @@ -0,0 +1,36 @@ +package blob + +import ( + "testing" +) + +func TestEncodeOpID(t *testing.T) { + tests := []struct { + name string + op OpID + }{ + { + name: "Zero values", + op: OpID{Ts: 0, Idx: 0, Origin: 0}, + }, + { + name: "Maximum values", + op: OpID{Ts: maxTimestamp, Idx: maxIdx, Origin: maxOrigin}, + }, + { + name: "Random values", + op: OpID{Ts: 1234567890, Idx: 9876, Origin: 987654321}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + encoded := tt.op.Encode() + decoded := encoded.Decode() + + if decoded != tt.op { + t.Errorf("Round-trip failed. 
Got %+v, want %+v", decoded, tt.op) + } + }) + } +} diff --git a/backend/blob/blob_comment.go b/backend/blob/blob_comment.go index f3f9eb4d..8d3df906 100644 --- a/backend/blob/blob_comment.go +++ b/backend/blob/blob_comment.go @@ -21,20 +21,33 @@ func init() { cbornode.RegisterCborType(Block{}) cbornode.RegisterCborType(Annotation{}) cbornode.RegisterCborType(CommentBlock{}) - cbornode.RegisterCborType(CommentTarget{}) } +// Comment is a blob that represents a comment to some document, or a reply to some other comment. type Comment struct { CommentUnsigned Sig core.Signature `refmt:"sig,omitempty"` } -func NewComment(kp core.KeyPair, cpb cid.Cid, t CommentTarget, threadRoot, replyParent cid.Cid, body []CommentBlock, ts int64) (eb Encoded[*Comment], err error) { +// NewComment creates a new Comment blob. +func NewComment( + kp core.KeyPair, + cpb cid.Cid, + space core.Principal, + path string, + version []cid.Cid, + threadRoot cid.Cid, + replyParent cid.Cid, + body []CommentBlock, + ts time.Time, +) (eb Encoded[*Comment], err error) { cu := CommentUnsigned{ Type: blobTypeComment, Capability: cpb, Author: kp.Principal(), - Target: t, + Space: space, + Path: path, + Version: version, ThreadRoot: threadRoot, ReplyParent: replyParent, Body: body, @@ -49,17 +62,21 @@ func NewComment(kp core.KeyPair, cpb cid.Cid, t CommentTarget, threadRoot, reply return encodeBlob(cc) } +// CommentUnsigned holds the fields of a Comment that are meant to be signed. 
type CommentUnsigned struct { - Type blobType `refmt:"@type"` + Type blobType `refmt:"type"` Capability cid.Cid `refmt:"capability,omitempty"` Author core.Principal `refmt:"author"` - Target CommentTarget `refmt:"target"` + Space core.Principal `refmt:"space"` + Path string `refmt:"path,omitempty"` + Version []cid.Cid `refmt:"version,omitempty"` ThreadRoot cid.Cid `refmt:"threadRoot,omitempty"` ReplyParent cid.Cid `refmt:"replyParent,omitempty"` Body []CommentBlock `refmt:"body"` - Ts int64 `refmt:"ts"` + Ts time.Time `refmt:"ts"` } +// Sign signs the CommentUnsigned with the given keypair. func (r *CommentUnsigned) Sign(kp core.KeyPair) (rr *Comment, err error) { if !r.Author.Equal(kp.Principal()) { return nil, fmt.Errorf("author mismatch when signing") @@ -81,31 +98,6 @@ func (r *CommentUnsigned) Sign(kp core.KeyPair) (rr *Comment, err error) { }, nil } -type CommentTarget struct { - Account core.Principal `refmt:"account"` - Path string `refmt:"path,omitempty"` - Version []cid.Cid `refmt:"version,omitempty"` -} - -// Block is a block of text with annotations. -type Block struct { - ID string `refmt:"id,omitempty"` // Omitempty when used in Documents. - Type string `refmt:"type,omitempty"` - Text string `refmt:"text,omitempty"` - Ref string `refmt:"ref,omitempty"` - Attributes map[string]string `refmt:"attributes,omitempty"` - Annotations []Annotation `refmt:"annotations,omitempty"` -} - -// Annotation is a range of text that has a type and attributes. -type Annotation struct { - Type string `refmt:"type"` - Ref string `refmt:"ref,omitempty"` - Attributes map[string]string `refmt:"attributes,omitempty"` - Starts []int32 `refmt:"starts,omitempty"` - Ends []int32 `refmt:"ends,omitempty"` -} - // CommentBlock is a block of text with annotations. 
type CommentBlock struct { Block @@ -135,7 +127,7 @@ func init() { } func indexComment(ictx *indexingCtx, id int64, c cid.Cid, v *Comment) error { - riri, err := NewIRI(v.Target.Account, v.Target.Path) + riri, err := NewIRI(v.Space, v.Path) if err != nil { return fmt.Errorf("invalid comment target: %v", err) } @@ -166,14 +158,14 @@ func indexComment(ictx *indexingCtx, id int64, c cid.Cid, v *Comment) error { // - This comment must have a timestamp greater than any other predecessor comment. } - sb := newStructuralBlob(c, string(v.Type), v.Author, time.UnixMicro(v.Ts), riri, cid.Undef, v.Target.Account, time.Time{}) + sb := newStructuralBlob(c, string(v.Type), v.Author, v.Ts, riri, cid.Undef, v.Space, time.Time{}) targetURI, err := url.Parse(string(riri)) if err != nil { return err } - targetVersion := NewVersion(v.Target.Version...) + targetVersion := NewVersion(v.Version...) if targetVersion != "" { q := targetURI.Query() q.Set("v", targetVersion.String()) @@ -199,12 +191,12 @@ func indexComment(ictx *indexingCtx, id int64, c cid.Cid, v *Comment) error { var indexCommentContent func([]CommentBlock) error // Declaring function to allow recursive calls. 
indexCommentContent = func(in []CommentBlock) error { for _, blk := range in { - if err := indexURL(&sb, ictx.log, blk.ID, "comment/"+blk.Type, blk.Ref); err != nil { + if err := indexURL(&sb, ictx.log, blk.ID, "comment/"+blk.Type, blk.Link); err != nil { return err } for _, a := range blk.Annotations { - if err := indexURL(&sb, ictx.log, blk.ID, "comment/"+a.Type, a.Ref); err != nil { + if err := indexURL(&sb, ictx.log, blk.ID, "comment/"+a.Type, a.Link); err != nil { return err } } diff --git a/backend/blob/blob_ref.go b/backend/blob/blob_ref.go index 423df46b..25687bc5 100644 --- a/backend/blob/blob_ref.go +++ b/backend/blob/blob_ref.go @@ -4,7 +4,6 @@ import ( "bytes" "fmt" "seed/backend/core" - "seed/backend/hlc" "seed/backend/ipfs" "time" @@ -20,15 +19,20 @@ func init() { cbornode.RegisterCborType(RefUnsigned{}) } +// Ref is a blob that claims an entry for a path in a space +// to point to some other blobs, namely document changes. +// It's similar to a Git Ref, but is signed. type Ref struct { RefUnsigned Sig core.Signature `refmt:"sig,omitempty"` } -func NewRef(kp core.KeyPair, genesis cid.Cid, rid IRI, heads []cid.Cid, ts int64) (eb Encoded[*Ref], err error) { +// NewRef creates a new Ref blob. +func NewRef(kp core.KeyPair, genesis cid.Cid, space core.Principal, path string, heads []cid.Cid, ts time.Time) (eb Encoded[*Ref], err error) { ru := RefUnsigned{ Type: blobTypeRef, - Resource: rid, + Space: space, + Path: path, GenesisBlob: genesis, Heads: heads, Author: kp.Principal(), @@ -43,16 +47,19 @@ func NewRef(kp core.KeyPair, genesis cid.Cid, rid IRI, heads []cid.Cid, ts int64 return encodeBlob(cc) } +// RefUnsigned holds the fields of a Ref that are meant to be signed. 
type RefUnsigned struct { - Type blobType `refmt:"@type"` - Resource IRI `refmt:"resource"` + Type blobType `refmt:"type"` + Space core.Principal `refmt:"space"` + Path string `refmt:"path,omitempty"` GenesisBlob cid.Cid `refmt:"genesisBlob"` Capability cid.Cid `refmt:"capability,omitempty"` Heads []cid.Cid `refmt:"heads"` Author core.Principal `refmt:"author"` - Ts int64 `refmt:"ts"` + Ts time.Time `refmt:"ts"` } +// Sign the ref blob with the provided key pair. func (r *RefUnsigned) Sign(kp core.KeyPair) (rr *Ref, err error) { if !r.Author.Equal(kp.Principal()) { return nil, fmt.Errorf("author mismatch when signing") @@ -98,11 +105,16 @@ func init() { func indexRef(ictx *indexingCtx, id int64, c cid.Cid, v *Ref) error { // TODO(hm24): more validation and refs for docs. + iri, err := NewIRI(v.Space, v.Path) + if err != nil { + return err + } + var sb StructuralBlob - if v.Ts == ProfileGenesisEpoch { - sb = newStructuralBlob(c, string(blobTypeRef), v.Author, hlc.Timestamp(v.Ts).Time(), v.Resource, v.GenesisBlob, v.Author, hlc.Timestamp(v.Ts).Time()) + if v.Ts.Equal(unixZero) { + sb = newStructuralBlob(c, string(blobTypeRef), v.Author, v.Ts, iri, v.GenesisBlob, v.Author, v.Ts) } else { - sb = newStructuralBlob(c, string(blobTypeRef), v.Author, hlc.Timestamp(v.Ts).Time(), v.Resource, v.GenesisBlob, nil, time.Time{}) + sb = newStructuralBlob(c, string(blobTypeRef), v.Author, v.Ts, iri, v.GenesisBlob, nil, time.Time{}) } if len(v.Heads) == 0 { diff --git a/backend/blob/index.go b/backend/blob/index.go index 9598526b..c137e1dd 100644 --- a/backend/blob/index.go +++ b/backend/blob/index.go @@ -46,6 +46,21 @@ func NewIRI(account core.Principal, path string) (IRI, error) { return IRI("hm://" + account.String() + path), nil } +// SpacePath parses IRI into space+path tuple if possible. 
+func (iri IRI) SpacePath() (space core.Principal, path string, err error) { + u, err := url.Parse(string(iri)) + if err != nil { + return nil, "", err + } + + space, err = core.DecodePrincipal(u.Host) + if err != nil { + return nil, "", err + } + + return space, u.Path, nil +} + type Index struct { bs *blockStore db *sqlitex.Pool @@ -486,7 +501,7 @@ func (idx *indexingCtx) SaveBlob(id int64, b StructuralBlob) error { if !b.Ts.IsZero() { // For changes we need microsecond timestamp, so we use it for all the blobs. - blobTime = maybe.New(b.Ts.UnixMicro()) + blobTime = maybe.New(b.Ts.UnixMilli()) } if err := dbStructuralBlobsInsert(idx.conn, id, b.Type, blobAuthor, blobResource, blobTime, blobMeta); err != nil { diff --git a/backend/blob/index_sql.go b/backend/blob/index_sql.go index f865d8ea..bc209bfe 100644 --- a/backend/blob/index_sql.go +++ b/backend/blob/index_sql.go @@ -284,10 +284,6 @@ func dbResourcesMaybeSetTimestamp(conn *sqlite.Conn, id, ts int64) (updated bool return false, fmt.Errorf("must specify resource ID") } - if ts == 0 { - return false, fmt.Errorf("must specify timestamp") - } - if err := sqlitex.Exec(conn, qResourcesMaybeSetTimestamp(), nil, ts, id); err != nil { return false, err } diff --git a/backend/blob/registry.go b/backend/blob/registry.go index 96006d20..39346e83 100644 --- a/backend/blob/registry.go +++ b/backend/blob/registry.go @@ -56,11 +56,11 @@ func registerIndexer[T any]( } // makeCBORTypeMatch returns a subslice of CBOR bytes that could be used to match -// our CBOR blob types with `@type` field. If we find this subslice +// our CBOR blob types with `type` field. If we find this subslice // we can attempt to decode the blob as CBOR data into the corresponding concrete type. 
func makeCBORTypeMatch(blobType blobType) []byte { var b bytes.Buffer - if err := cbor.MarshalToBuffer("@type", &b); err != nil { + if err := cbor.MarshalToBuffer("type", &b); err != nil { panic(err) } diff --git a/backend/crdt/crdt.go b/backend/crdt/crdt.go index 85d7c97f..25946c80 100644 --- a/backend/crdt/crdt.go +++ b/backend/crdt/crdt.go @@ -1,6 +1,8 @@ // Package crdt provides Seed-specific CRDTs. These are not meant to be general-purpose CRDTs, // but still are generic enough and could be extended for some other use cases. In case of tradeoffs, // we favor Seed-specific use cases. +// +// Deprecated: This package is abandoned and is only here for historical reasons. package crdt import "fmt" diff --git a/backend/crdt2/crdt.go b/backend/crdt2/crdt.go new file mode 100644 index 00000000..b1df603b --- /dev/null +++ b/backend/crdt2/crdt.go @@ -0,0 +1,4 @@ +// Package crdt2 provides a LWW map CRDT which is similar to Shelf's map CRDT. +// +// Deprecated: This package is abandoned and is only here for historical reasons. 
+package crdt2 diff --git a/backend/daemon/daemon_e2e_test.go b/backend/daemon/daemon_e2e_test.go index 9f29af9e..9d6eda67 100644 --- a/backend/daemon/daemon_e2e_test.go +++ b/backend/daemon/daemon_e2e_test.go @@ -71,51 +71,88 @@ func TestDaemonRegisterKey(t *testing.T) { } } +type changeBuilder struct { + req *documents.CreateDocumentChangeRequest +} + +func newChangeBuilder(account core.Principal, path, baseVersion, keyName string) *changeBuilder { + return &changeBuilder{ + req: &documents.CreateDocumentChangeRequest{ + Account: account.String(), + Path: path, + BaseVersion: baseVersion, + SigningKeyName: keyName, + }, + } +} + +func (b *changeBuilder) SetMetadata(key, value string) *changeBuilder { + b.req.Changes = append(b.req.Changes, &documents.DocumentChange{ + Op: &documents.DocumentChange_SetMetadata_{ + SetMetadata: &documents.DocumentChange_SetMetadata{Key: key, Value: value}, + }, + }) + return b +} + +func (b *changeBuilder) MoveBlock(blockID, parent, leftSibling string) *changeBuilder { + b.req.Changes = append(b.req.Changes, &documents.DocumentChange{ + Op: &documents.DocumentChange_MoveBlock_{ + MoveBlock: &documents.DocumentChange_MoveBlock{BlockId: blockID, Parent: parent, LeftSibling: leftSibling}, + }, + }) + return b +} + +func (b *changeBuilder) ReplaceBlock(block, btype, text string, annotations ...*documents.Annotation) *changeBuilder { + b.req.Changes = append(b.req.Changes, &documents.DocumentChange{ + Op: &documents.DocumentChange_ReplaceBlock{ + ReplaceBlock: &documents.Block{ + Id: block, + Type: btype, + Text: text, + Annotations: annotations, + }, + }, + }) + return b +} + +func (b *changeBuilder) DeleteBlock(block string) *changeBuilder { + b.req.Changes = append(b.req.Changes, &documents.DocumentChange{ + Op: &documents.DocumentChange_DeleteBlock{DeleteBlock: block}, + }) + return b +} + +func (b *changeBuilder) Build() *documents.CreateDocumentChangeRequest { + return b.req +} + func TestDaemonUpdateProfile(t *testing.T) { 
t.Parallel() + dmn := makeTestApp(t, "alice", makeTestConfig(t), true) ctx := context.Background() - alice := coretest.NewTester("alice") + alice := coretest.NewTester("alice").Account.Principal() - doc, err := dmn.RPC.DocumentsV3.CreateDocumentChange(ctx, &documents.CreateDocumentChangeRequest{ - Account: alice.Account.Principal().String(), - Path: "", - Changes: []*documents.DocumentChange{ - {Op: &documents.DocumentChange_SetMetadata_{ - SetMetadata: &documents.DocumentChange_SetMetadata{Key: "title", Value: "Alice from the Wonderland"}, - }}, - {Op: &documents.DocumentChange_MoveBlock_{ - MoveBlock: &documents.DocumentChange_MoveBlock{BlockId: "b1", Parent: "", LeftSibling: ""}, - }}, - {Op: &documents.DocumentChange_ReplaceBlock{ - ReplaceBlock: &documents.Block{ - Id: "b1", - Type: "paragraph", - Text: "Hello", - }, - }}, - {Op: &documents.DocumentChange_MoveBlock_{ - MoveBlock: &documents.DocumentChange_MoveBlock{BlockId: "b2", Parent: "b1", LeftSibling: ""}, - }}, - {Op: &documents.DocumentChange_ReplaceBlock{ - ReplaceBlock: &documents.Block{ - Id: "b2", - Type: "paragraph", - Text: "World!", - }, - }}, - }, - SigningKeyName: "main", - }) + doc, err := dmn.RPC.DocumentsV3.CreateDocumentChange(ctx, newChangeBuilder(alice, "", "", "main"). + SetMetadata("title", "Alice from the Wonderland"). + MoveBlock("b1", "", ""). + ReplaceBlock("b1", "paragraph", "Hello"). + MoveBlock("b2", "b1", ""). + ReplaceBlock("b2", "paragraph", "World!"). + Build(), + ) require.NoError(t, err) want := &documents.Document{ - Account: alice.Account.Principal().String(), + Account: alice.String(), Path: "", Metadata: map[string]string{ "title": "Alice from the Wonderland", }, - Authors: []string{alice.Account.Principal().String()}, + Authors: []string{alice.String()}, Content: []*documents.BlockNode{ { Block: &documents.Block{ @@ -142,27 +179,21 @@ func TestDaemonUpdateProfile(t *testing.T) { Compare(t, "profile document must match") // Do another update. 
+ { - doc, err := dmn.RPC.DocumentsV3.CreateDocumentChange(ctx, &documents.CreateDocumentChangeRequest{ - Account: alice.Account.Principal().String(), - Path: "", - BaseVersion: doc.Version, - Changes: []*documents.DocumentChange{ - {Op: &documents.DocumentChange_SetMetadata_{ - SetMetadata: &documents.DocumentChange_SetMetadata{Key: "title", Value: "Just Alice"}, - }}, - }, - SigningKeyName: "main", - }) + doc, err := dmn.RPC.DocumentsV3.CreateDocumentChange(ctx, newChangeBuilder(alice, "", doc.Version, "main"). + SetMetadata("title", "Just Alice"). + Build(), + ) require.NoError(t, err) want := &documents.Document{ - Account: alice.Account.Principal().String(), + Account: alice.String(), Path: "", Metadata: map[string]string{ "title": "Just Alice", }, - Authors: []string{alice.Account.Principal().String()}, + Authors: []string{alice.String()}, Content: []*documents.BlockNode{ { Block: &documents.Block{ @@ -868,3 +899,22 @@ func TestSubscriptions(t *testing.T) { }) require.Error(t, err) } + +func TestBug_BrokenFormattingAnnotations(t *testing.T) { + t.Parallel() + + dmn := makeTestApp(t, "alice", makeTestConfig(t), true) + ctx := context.Background() + alice := coretest.NewTester("alice").Account.Principal() + + doc, err := dmn.RPC.DocumentsV3.CreateDocumentChange(ctx, newChangeBuilder(alice, "", "", "main"). + SetMetadata("title", "Alice from the Wonderland"). + MoveBlock("b1", "", ""). + ReplaceBlock("b1", "paragraph", "Hello world", &documents.Annotation{Type: "bold", Starts: []int32{0}, Ends: []int32{5}}). + MoveBlock("b2", "b1", ""). + ReplaceBlock("b2", "paragraph", "World!"). 
+ Build(), + ) + require.NoError(t, err) + require.NotNil(t, doc) +} diff --git a/backend/genproto/documents/v3alpha/documents.pb.go b/backend/genproto/documents/v3alpha/documents.pb.go index eee5652f..1734dd85 100644 --- a/backend/genproto/documents/v3alpha/documents.pb.go +++ b/backend/genproto/documents/v3alpha/documents.pb.go @@ -997,7 +997,7 @@ type Block struct { Text string `protobuf:"bytes,3,opt,name=text,proto3" json:"text,omitempty"` // Optional. The hyperlink to an external resource. // Must be a valid URL. - Ref string `protobuf:"bytes,7,opt,name=ref,proto3" json:"ref,omitempty"` + Link string `protobuf:"bytes,7,opt,name=link,proto3" json:"link,omitempty"` // Arbitrary attributes of the block. Attributes map[string]string `protobuf:"bytes,4,rep,name=attributes,proto3" json:"attributes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` // Annotation "layers" of the block. @@ -1060,9 +1060,9 @@ func (x *Block) GetText() string { return "" } -func (x *Block) GetRef() string { +func (x *Block) GetLink() string { if x != nil { - return x.Ref + return x.Link } return "" } @@ -1105,7 +1105,7 @@ type Annotation struct { Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` // Optional. A hyperlink to an external resource. // Must be a valid URL. - Ref string `protobuf:"bytes,5,opt,name=ref,proto3" json:"ref,omitempty"` + Link string `protobuf:"bytes,5,opt,name=link,proto3" json:"link,omitempty"` // Arbitrary key-value attributes of the annotation. Attributes map[string]string `protobuf:"bytes,2,rep,name=attributes,proto3" json:"attributes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` // Start offsets of possibly disjoint spans of text for which this annotation is applied. 
@@ -1155,9 +1155,9 @@ func (x *Annotation) GetType() string { return "" } -func (x *Annotation) GetRef() string { +func (x *Annotation) GetLink() string { if x != nil { - return x.Ref + return x.Link } return "" } @@ -1580,121 +1580,121 @@ var file_documents_v3alpha_documents_proto_rawDesc = []byte{ 0x20, 0x03, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x08, 0x63, 0x68, 0x69, - 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x22, 0xc9, 0x02, 0x0a, 0x05, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, + 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x22, 0xcb, 0x02, 0x0a, 0x05, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x65, 0x78, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x04, 0x74, 0x65, 0x78, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x65, 0x66, 0x18, 0x07, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x72, 0x65, 0x66, 0x12, 0x51, 0x0a, 0x0a, 0x61, 0x74, 0x74, - 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, - 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, - 0x74, 0x73, 0x2e, 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, - 0x2e, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, - 0x52, 0x0a, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x12, 0x48, 0x0a, 0x0b, - 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x26, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, - 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 
0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x41, - 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0b, 0x61, 0x6e, 0x6e, 0x6f, 0x74, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, 0x76, 0x69, 0x73, 0x69, - 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x72, 0x65, 0x76, 0x69, 0x73, 0x69, - 0x6f, 0x6e, 0x1a, 0x3d, 0x0a, 0x0f, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, - 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, + 0x09, 0x52, 0x04, 0x74, 0x65, 0x78, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x18, + 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x12, 0x51, 0x0a, 0x0a, 0x61, + 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x31, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, 0x6d, + 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x42, 0x6c, 0x6f, + 0x63, 0x6b, 0x2e, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x45, 0x6e, 0x74, + 0x72, 0x79, 0x52, 0x0a, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x12, 0x48, + 0x0a, 0x0b, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x05, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, + 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, + 0x2e, 0x41, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0b, 0x61, 0x6e, 0x6e, + 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x72, 0x65, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x1a, 0x3d, 0x0a, 0x0f, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, + 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 
0x79, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, + 0x02, 0x38, 0x01, 0x22, 0xf7, 0x01, 0x0a, 0x0a, 0x41, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x18, 0x05, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x12, 0x56, 0x0a, 0x0a, 0x61, 0x74, + 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x36, + 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, + 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x41, 0x6e, 0x6e, 0x6f, + 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, + 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, + 0x65, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x72, 0x74, 0x73, 0x18, 0x03, 0x20, 0x03, + 0x28, 0x05, 0x52, 0x06, 0x73, 0x74, 0x61, 0x72, 0x74, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x65, 0x6e, + 0x64, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x05, 0x52, 0x04, 0x65, 0x6e, 0x64, 0x73, 0x1a, 0x3d, + 0x0a, 0x0f, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, + 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, + 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xd3, 0x03, + 0x0a, 0x0e, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, + 0x12, 0x5b, 0x0a, 0x0c, 0x73, 0x65, 0x74, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, + 0x18, 0x01, 0x20, 
0x01, 0x28, 0x0b, 0x32, 0x36, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, 0x65, + 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x33, 0x61, 0x6c, + 0x70, 0x68, 0x61, 0x2e, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x68, 0x61, 0x6e, + 0x67, 0x65, 0x2e, 0x53, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x48, 0x00, + 0x52, 0x0b, 0x73, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x55, 0x0a, + 0x0a, 0x6d, 0x6f, 0x76, 0x65, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x34, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, + 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x44, + 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x2e, 0x4d, 0x6f, + 0x76, 0x65, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x00, 0x52, 0x09, 0x6d, 0x6f, 0x76, 0x65, 0x42, + 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x48, 0x0a, 0x0d, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x5f, + 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x63, 0x6f, + 0x6d, 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, + 0x2e, 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x00, + 0x52, 0x0c, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x23, + 0x0a, 0x0c, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0b, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x42, 0x6c, + 0x6f, 0x63, 0x6b, 0x1a, 0x61, 0x0a, 0x09, 0x4d, 0x6f, 0x76, 0x65, 0x42, 0x6c, 0x6f, 0x63, 0x6b, + 0x12, 0x19, 0x0a, 0x08, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x07, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x49, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x70, + 0x61, 0x72, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 
0x28, 0x09, 0x52, 0x06, 0x70, 0x61, 0x72, + 0x65, 0x6e, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x6c, 0x65, 0x66, 0x74, 0x5f, 0x73, 0x69, 0x62, 0x6c, + 0x69, 0x6e, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x6c, 0x65, 0x66, 0x74, 0x53, + 0x69, 0x62, 0x6c, 0x69, 0x6e, 0x67, 0x1a, 0x35, 0x0a, 0x0b, 0x53, 0x65, 0x74, 0x4d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, - 0x01, 0x22, 0xf5, 0x01, 0x0a, 0x0a, 0x41, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, - 0x74, 0x79, 0x70, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x65, 0x66, 0x18, 0x05, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x03, 0x72, 0x65, 0x66, 0x12, 0x56, 0x0a, 0x0a, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, - 0x75, 0x74, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x36, 0x2e, 0x63, 0x6f, 0x6d, - 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, - 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x41, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x2e, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x45, 0x6e, 0x74, - 0x72, 0x79, 0x52, 0x0a, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x12, 0x16, - 0x0a, 0x06, 0x73, 0x74, 0x61, 0x72, 0x74, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x05, 0x52, 0x06, - 0x73, 0x74, 0x61, 0x72, 0x74, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x65, 0x6e, 0x64, 0x73, 0x18, 0x04, - 0x20, 0x03, 0x28, 0x05, 0x52, 0x04, 0x65, 0x6e, 0x64, 0x73, 0x1a, 0x3d, 0x0a, 0x0f, 0x41, 0x74, - 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, - 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 
- 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xd3, 0x03, 0x0a, 0x0e, 0x44, 0x6f, - 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x5b, 0x0a, 0x0c, - 0x73, 0x65, 0x74, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x36, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, - 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, - 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x2e, 0x53, - 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x48, 0x00, 0x52, 0x0b, 0x73, 0x65, - 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x55, 0x0a, 0x0a, 0x6d, 0x6f, 0x76, - 0x65, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x34, 0x2e, - 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, - 0x74, 0x73, 0x2e, 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x44, 0x6f, 0x63, 0x75, 0x6d, - 0x65, 0x6e, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x2e, 0x4d, 0x6f, 0x76, 0x65, 0x42, 0x6c, - 0x6f, 0x63, 0x6b, 0x48, 0x00, 0x52, 0x09, 0x6d, 0x6f, 0x76, 0x65, 0x42, 0x6c, 0x6f, 0x63, 0x6b, - 0x12, 0x48, 0x0a, 0x0d, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x5f, 0x62, 0x6c, 0x6f, 0x63, - 0x6b, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x04, 0x0a, + 0x02, 0x6f, 0x70, 0x32, 0xc6, 0x05, 0x0a, 0x09, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, + 0x73, 0x12, 0x63, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, + 0x12, 0x2e, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, + 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 
0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x47, 0x65, + 0x74, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x24, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, + 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x44, 0x6f, + 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x75, 0x0a, 0x14, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, + 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x37, + 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, + 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x43, 0x72, 0x65, 0x61, + 0x74, 0x65, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x24, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x33, 0x61, - 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x00, 0x52, 0x0c, 0x72, 0x65, - 0x70, 0x6c, 0x61, 0x63, 0x65, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x23, 0x0a, 0x0c, 0x64, 0x65, - 0x6c, 0x65, 0x74, 0x65, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, - 0x48, 0x00, 0x52, 0x0b, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x1a, - 0x61, 0x0a, 0x09, 0x4d, 0x6f, 0x76, 0x65, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x19, 0x0a, 0x08, - 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, - 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x49, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x70, 0x61, 0x72, 0x65, 0x6e, - 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x12, - 0x21, 0x0a, 0x0c, 0x6c, 0x65, 0x66, 0x74, 0x5f, 0x73, 0x69, 0x62, 0x6c, 0x69, 0x6e, 0x67, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x6c, 0x65, 0x66, 0x74, 0x53, 0x69, 
0x62, 0x6c, 0x69, - 0x6e, 0x67, 0x1a, 0x35, 0x0a, 0x0b, 0x53, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, - 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x04, 0x0a, 0x02, 0x6f, 0x70, 0x32, - 0xc6, 0x05, 0x0a, 0x09, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x63, 0x0a, - 0x0b, 0x47, 0x65, 0x74, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x2e, 0x2e, 0x63, - 0x6f, 0x6d, 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, - 0x73, 0x2e, 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x6f, 0x63, - 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x24, 0x2e, 0x63, + 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x5b, 0x0a, + 0x0e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x12, + 0x31, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, 0x6d, + 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x44, 0x65, 0x6c, + 0x65, 0x74, 0x65, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x12, 0x74, 0x0a, 0x0d, 0x4c, 0x69, + 0x73, 0x74, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x30, 0x2e, 0x63, 0x6f, + 0x6d, 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, + 0x2e, 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x44, 0x6f, 0x63, + 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x31, 0x2e, + 0x63, 0x6f, 0x6d, 
0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, + 0x74, 0x73, 0x2e, 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x44, + 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x80, 0x01, 0x0a, 0x11, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x44, 0x6f, 0x63, + 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x34, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, 0x65, + 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x33, 0x61, 0x6c, + 0x70, 0x68, 0x61, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x44, 0x6f, 0x63, 0x75, + 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x35, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, - 0x73, 0x2e, 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, - 0x6e, 0x74, 0x12, 0x75, 0x0a, 0x14, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x44, 0x6f, 0x63, 0x75, - 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x37, 0x2e, 0x63, 0x6f, 0x6d, - 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, - 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x44, 0x6f, - 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x1a, 0x24, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, - 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, - 0x2e, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x5b, 0x0a, 0x0e, 0x44, 0x65, 0x6c, - 0x65, 0x74, 0x65, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x31, 0x2e, 0x63, 0x6f, + 0x73, 0x2e, 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x6f, + 0x6f, 0x74, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 
0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x86, 0x01, 0x0a, 0x13, 0x4c, 0x69, 0x73, 0x74, 0x44, 0x6f, 0x63, 0x75, + 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x12, 0x36, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, - 0x2e, 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x44, - 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, - 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, - 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x12, 0x74, 0x0a, 0x0d, 0x4c, 0x69, 0x73, 0x74, 0x44, 0x6f, - 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x30, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, - 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x33, 0x61, - 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, - 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x31, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, - 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x76, - 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x44, 0x6f, 0x63, 0x75, 0x6d, - 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x80, 0x01, 0x0a, - 0x11, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, - 0x74, 0x73, 0x12, 0x34, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, - 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, - 0x4c, 0x69, 0x73, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, - 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x35, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x73, - 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x33, - 
0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x44, 0x6f, - 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, - 0x86, 0x01, 0x0a, 0x13, 0x4c, 0x69, 0x73, 0x74, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, - 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x12, 0x36, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, - 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x33, 0x61, - 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, - 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x37, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, 0x6f, 0x63, 0x75, 0x6d, - 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x4c, 0x69, 0x73, - 0x74, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x33, 0x5a, 0x31, 0x73, 0x65, 0x65, 0x64, - 0x2f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x67, 0x65, 0x6e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x2f, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2f, 0x76, 0x33, 0x61, 0x6c, - 0x70, 0x68, 0x61, 0x3b, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x62, 0x06, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x2e, 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x44, 0x6f, 0x63, + 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x37, 0x2e, 0x63, 0x6f, 0x6d, 0x2e, 0x73, 0x65, 0x65, 0x64, 0x2e, 0x64, + 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, + 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x43, 0x68, 0x61, + 0x6e, 0x67, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x33, 0x5a, 0x31, + 0x73, 
0x65, 0x65, 0x64, 0x2f, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x6e, 0x64, 0x2f, 0x67, 0x65, 0x6e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x2f, + 0x76, 0x33, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x3b, 0x64, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, + 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/backend/manual_test.go b/backend/manual_test.go index 2318aba5..2c7e6b58 100644 --- a/backend/manual_test.go +++ b/backend/manual_test.go @@ -2,8 +2,8 @@ package backend import ( "context" - "seed/backend/core" "seed/backend/blob" + "seed/backend/core" "seed/backend/storage" "seed/backend/testutil" "seed/backend/util/must" diff --git a/backend/mttnet/mttnet.go b/backend/mttnet/mttnet.go index 8bc03e3d..bee28bec 100644 --- a/backend/mttnet/mttnet.go +++ b/backend/mttnet/mttnet.go @@ -45,7 +45,7 @@ const ProtocolSupportKey = "seed-support" // This is what we use as a key to pro const ( protocolPrefix = "/hypermedia/" - protocolVersion = "0.4.1" + protocolVersion = "0.7.0" ) var userAgent = "seed/" diff --git a/backend/storage/storage_migrations.go b/backend/storage/storage_migrations.go index 0108b1fe..987e4494 100644 --- a/backend/storage/storage_migrations.go +++ b/backend/storage/storage_migrations.go @@ -50,56 +50,8 @@ type migration struct { // // Migrations should be idempotant as much as we can make them, to prevent issues with partially applied migrations. var migrations = []migration{ - // New beginning. While we're doing the HM24 migration we can still make some breaking changes. - // TODO(burdiyan): add a real version when we are ready to release. 
- {Version: "2024-08-31.hm24-dev-1", Run: func(_ *Store, _ *sqlite.Conn) error { - return nil - }}, - {Version: "2024-09-06.01", Run: func(_ *Store, conn *sqlite.Conn) error { - if err := sqlitex.ExecScript(conn, sqlfmt(` - ALTER TABLE blobs RENAME COLUMN data TO data_old; - ALTER TABLE blobs ADD COLUMN data BLOB; - UPDATE blobs SET data = data_old, data_old = NULL; - ALTER TABLE blobs DROP COLUMN data_old; - - DROP TABLE IF EXISTS resource_heads; - - CREATE INDEX structural_blobs_by_resource ON structural_blobs (resource); - CREATE INDEX structural_blobs_by_author ON structural_blobs (author); - CREATE INDEX resources_by_genesis_blob ON resources (genesis_blob); - CREATE INDEX structural_blobs_by_genesis_blob ON structural_blobs (genesis_blob); - `)); err != nil { - return err - } - - return nil - }}, - {Version: "2024-09-23.01", Run: func(_ *Store, conn *sqlite.Conn) error { - if err := sqlitex.ExecScript(conn, sqlfmt(` - ALTER TABLE peers ADD COLUMN explicitly_connected BOOLEAN DEFAULT false NOT NULL; - `)); err != nil { - return err - } - - return nil - }}, - {Version: "2024-09-26.01", Run: func(_ *Store, conn *sqlite.Conn) error { - if err := sqlitex.ExecScript(conn, sqlfmt(` - ALTER TABLE peers RENAME TO peers_old; - CREATE TABLE peers( - id INTEGER PRIMARY KEY, - pid TEXT UNIQUE NOT NULL, - addresses TEXT UNIQUE NOT NULL, - explicitly_connected BOOLEAN DEFAULT false NOT NULL, - created_at INTEGER DEFAULT (strftime('%s', 'now')) NOT NULL, - updated_at INTEGER DEFAULT (strftime('%s', 'now')) NOT NULL - ); - INSERT into peers(pid, addresses, explicitly_connected) select pid, addresses, explicitly_connected from peers_old; - DROP TABLE peers_old; - `)); err != nil { - return err - } - + // New beginning. 
+ {Version: "2024-10-14.01", Run: func(_ *Store, _ *sqlite.Conn) error { return nil }}, } diff --git a/backend/storage/testdata/seed-test-db-snapshot/VERSION b/backend/storage/testdata/seed-test-db-snapshot/VERSION index 267e10df..52aadce4 100644 --- a/backend/storage/testdata/seed-test-db-snapshot/VERSION +++ b/backend/storage/testdata/seed-test-db-snapshot/VERSION @@ -1 +1 @@ -2024-08-31.hm24-dev-1 \ No newline at end of file +2024-10-14.01 \ No newline at end of file diff --git a/backend/storage/testdata/seed-test-db-snapshot/db/db.sqlite-shm b/backend/storage/testdata/seed-test-db-snapshot/db/db.sqlite-shm index eeac3357..25af0d34 100644 Binary files a/backend/storage/testdata/seed-test-db-snapshot/db/db.sqlite-shm and b/backend/storage/testdata/seed-test-db-snapshot/db/db.sqlite-shm differ diff --git a/backend/storage/testdata/seed-test-db-snapshot/db/db.sqlite-wal b/backend/storage/testdata/seed-test-db-snapshot/db/db.sqlite-wal index e9f9d15b..334f4e87 100644 Binary files a/backend/storage/testdata/seed-test-db-snapshot/db/db.sqlite-wal and b/backend/storage/testdata/seed-test-db-snapshot/db/db.sqlite-wal differ diff --git a/backend/util/btree/btree.go b/backend/util/btree/btree.go new file mode 100644 index 00000000..06ef0ca4 --- /dev/null +++ b/backend/util/btree/btree.go @@ -0,0 +1,146 @@ +// Package btree provides a B-Tree map wrapper for an existing library, exposing a simpler and more convenient API. +package btree + +import ( + "iter" + + "github.com/tidwall/btree" +) + +// Map is a B-Tree map data structure. +type Map[K, V any] struct { + hint btree.PathHint + tr *btree.BTreeG[node[K, V]] + cmp func(K, K) int +} + +type node[K, V any] struct { + k K + v V +} + +func newNode[K, V any](k K, v V) node[K, V] { + return node[K, V]{k: k, v: v} +} + +// New creates a new B-Tree map. 
+func New[K, V any](degree int, cmp func(K, K) int) *Map[K, V] { + tr := btree.NewBTreeGOptions( + func(a, b node[K, V]) bool { + return cmp(a.k, b.k) < 0 + }, + btree.Options{ + NoLocks: true, + Degree: degree, + }, + ) + + return &Map[K, V]{ + tr: tr, + cmp: cmp, + } +} + +// Set key k to value v. +func (b *Map[K, V]) Set(k K, v V) (replaced bool) { + _, replaced = b.tr.SetHint(newNode(k, v), &b.hint) + return replaced +} + +// Swap is like Set but returns the previous value if any. +func (b *Map[K, V]) Swap(k K, v V) (prev V, replaced bool) { + oldNode, replaced := b.tr.SetHint(newNode(k, v), &b.hint) + return oldNode.v, replaced +} + +// GetMaybe returns the value at k, or a zero value if k is not set. +// Use Get if you want to distinguish between the zero value and the key not existing. +func (b *Map[K, V]) GetMaybe(k K) (v V) { + b.tr.AscendHint(node[K, V]{k: k}, func(item node[K, V]) bool { + if b.cmp(item.k, k) == 0 { + v = item.v + } + return false + }, &b.hint) + + return v +} + +// Get the value by key k. +func (b *Map[K, V]) Get(k K) (v V, ok bool) { + b.tr.AscendHint(node[K, V]{k: k}, func(item node[K, V]) bool { + if b.cmp(item.k, k) == 0 { + v = item.v + ok = true + } + return false + }, &b.hint) + + return v, ok +} + +// GetAtMaybe is like GetAt, but returns the zero value if key is not set. +func (b *Map[K, V]) GetAtMaybe(idx int) (k K, v V) { + n, _ := b.tr.GetAt(idx) + return n.k, n.v +} + +// GetAt returns the key-value pair at index idx. +func (b *Map[K, V]) GetAt(idx int) (k K, v V, ok bool) { + n, ok := b.tr.GetAt(idx) + return n.k, n.v, ok +} + +// Len returns the number of elements in the B-Tree. +func (b *Map[K, V]) Len() int { + return b.tr.Len() +} + +// Items returns an iterator for map key-value items. 
+func (b *Map[K, V]) Items() iter.Seq2[K, V] { + return func(yield func(K, V) bool) { + b.tr.AscendHint(node[K, V]{}, func(item node[K, V]) bool { + return yield(item.k, item.v) + }, &b.hint) + } +} + +// Seek returns an iterator for records starting from the given key (inclusive). +func (b *Map[K, V]) Seek(k K) iter.Seq2[K, V] { + return func(yield func(K, V) bool) { + b.tr.AscendHint(node[K, V]{k: k}, func(item node[K, V]) bool { + return yield(item.k, item.v) + }, &b.hint) + } +} + +// SeekReverse is like Seek, but in reverse order. +func (b *Map[K, V]) SeekReverse(k K) iter.Seq2[K, V] { + return func(yield func(K, V) bool) { + b.tr.DescendHint(node[K, V]{k: k}, func(item node[K, V]) bool { + return yield(item.k, item.v) + }, &b.hint) + } +} + +// Keys returns a slice of keys in the B-Tree in order. +func (b *Map[K, V]) Keys() []K { + keys := make([]K, 0, b.Len()) + for k := range b.Items() { + keys = append(keys, k) + } + return keys +} + +// Clear all elements in the map. +func (b *Map[K, V]) Clear() { + b.tr.Clear() +} + +// Copy performs an efficient structural copying of the map. +func (b *Map[K, V]) Copy() *Map[K, V] { + return &Map[K, V]{ + tr: b.tr.Copy(), + cmp: b.cmp, + } +} diff --git a/backend/util/btree/btree_test.go b/backend/util/btree/btree_test.go new file mode 100644 index 00000000..9d88eab9 --- /dev/null +++ b/backend/util/btree/btree_test.go @@ -0,0 +1,18 @@ +package btree + +import ( + "fmt" + "strings" + "testing" +) + +func TestCopyOnWrite(t *testing.T) { + bt := New[string, string](8, strings.Compare) + + bt.Set("a", "Hello") + bt.Set("b", "World") + + bt.Copy().Set("a", "Changed") + + fmt.Println(bt.GetMaybe("a")) +} diff --git a/backend/util/cclock/cclock.go b/backend/util/cclock/cclock.go new file mode 100644 index 00000000..3079ad17 --- /dev/null +++ b/backend/util/cclock/cclock.go @@ -0,0 +1,89 @@ +// Package cclock provides a causal clock. +// It ensure monotonicity of the timestamps. 
+package cclock + +import ( + "fmt" + "time" +) + +// Default values for the created clock. +// You can change them on the created clock if necessary. +const ( + DefaultPrecision = time.Millisecond + DefaultSkewThreshold = time.Second * 40 // Quite arbitrary. +) + +// Clock issues timestamps that are guaranteed to be greater than any previously observed timestamp, +// unless the local clock skew is greater than the configured threshold. +// Use New() to create clocks. +type Clock struct { + maxTime time.Time + NowFunc func() time.Time + Precision time.Duration + SkewThreshold time.Duration +} + +// New creates a new Clock with default configuration. +func New() *Clock { + return &Clock{ + NowFunc: time.Now, + Precision: DefaultPrecision, + SkewThreshold: DefaultSkewThreshold, + } +} + +// Track a timestamp observed elsewhere. +func (c *Clock) Track(t time.Time) error { + t = t.Round(c.Precision) + now := c.now() + + if t.Sub(now) >= c.SkewThreshold { + return fmt.Errorf("tracked timestamp %s is way ahead of the local time %s", t, now) + } + + c.track(t) + return nil +} + +func (c *Clock) track(t time.Time) { + t = t.Round(c.Precision) + if t.After(c.maxTime.Round(c.Precision)) { + c.maxTime = t + } +} + +// Now creates a new timestamp for the current time, +// ensuring it's greater than any previously tracked timestamps. +func (c *Clock) Now() (time.Time, error) { + now := c.now() + + // If local clock is less than max tracked timestamp, something is going wrong. + diff := c.maxTime.Sub(now) + if diff >= c.SkewThreshold { + return time.Time{}, fmt.Errorf("local clock %s is way behind the maximum tracked timestamp %s", now, c.maxTime) + } + + if diff >= 0 { + now = now.Add(diff + 1*c.Precision).Round(c.Precision) + if !c.maxTime.Before(now) { + panic("BUG: can't generate a good timestamp after adjusting") + } + } + + c.track(now) + return now, nil +} + +// MustNow is like Now(), but panics in case of untolerable clock skew. 
+func (c *Clock) MustNow() time.Time { + t, err := c.Now() + if err != nil { + panic(err) + } + return t +} + +func (c *Clock) now() time.Time { + return c.NowFunc().Round(c.Precision) +} diff --git a/backend/util/cclock/cclock_test.go b/backend/util/cclock/cclock_test.go new file mode 100644 index 00000000..914bd476 --- /dev/null +++ b/backend/util/cclock/cclock_test.go @@ -0,0 +1,48 @@ +package cclock + +import ( + "testing" + "time" + + "github.com/stretchr/testify/require" +) + +func TestPrecision(t *testing.T) { + c1 := New() + c2 := New() + + t1 := c1.MustNow() + time.Sleep(c1.Precision) + t2 := c2.MustNow() + + require.Greater(t, t2, t1, "second timestamp must be greater than the first one even in unrelated clocks") +} + +func TestClockCausality(t *testing.T) { + clock := New() + + // Number of iterations is arbitrary. + var last time.Time + for i := 0; i < int(clock.SkewThreshold/clock.Precision); i++ { + tt := clock.MustNow() + if !last.Before(tt) { + t.Fatalf("incorrect causality: prev=%s, current=%s %d", last, tt, i) + } + + last = tt + } +} + +func TestTrack(t *testing.T) { + clock := New() + + t1 := clock.MustNow() + t2 := t1.Add(2 * clock.SkewThreshold) + + require.Error(t, clock.Track(t2), "tracking a timestamp from the future must fail if exceeds the tolerance threshold") + + t3 := t1.Add(3 * clock.Precision) + require.NoError(t, clock.Track(t3)) + + require.Equal(t, t3, clock.maxTime) +} diff --git a/backend/util/colx/slice.go b/backend/util/colx/slice.go index 2b49cd95..50e4a004 100644 --- a/backend/util/colx/slice.go +++ b/backend/util/colx/slice.go @@ -1,6 +1,33 @@ package colx -import "fmt" +import ( + "fmt" + "slices" +) + +type Slice[T any] []T + +// WrapSlice wraps a standard slice into a Slice, +// which exposes various convenience methods for slice operations. 
+func WrapSlice[S ~[]E, E any](in S) Slice[E] { + return Slice[E](in) +} + +// Sort the slice in places using the provided comparison function, +// and returns the sorted result to allow chaining. +func (s Slice[T]) Sort(cmp func(T, T) int) Slice[T] { + slices.SortFunc(s, cmp) + return s +} + +// GetMaybe returns the element at the given index, +// or a zero value if the index is out of bounds. +func (s Slice[T]) GetMaybe(i int) T { + if i < 0 || i >= len(s) { + return *new(T) + } + return s[i] +} // SliceMap applies a map function to each element of the slice // and produces a new slice with (possibly) transformed value. diff --git a/backend/util/lookup/lookup.go b/backend/util/lookup/lookup.go new file mode 100644 index 00000000..76a9de16 --- /dev/null +++ b/backend/util/lookup/lookup.go @@ -0,0 +1,61 @@ +// Package lookup provides a simple utility for building lookup tables, +// useful when encoding repeated values in a file. +package lookup + +import ( + "maps" + "slices" + "strconv" +) + +// Table is a sorted list of unique values. +// Elsewhere, there should be a Pointer, which is an index into this list. +// Table must be constructed using a [Builder]. +type Table[T any] []T + +// Get value from the table given a pointer. +func (lt Table[T]) Get(i *Pointer) T { + return lt[*i] +} + +// Pointer is an index into the lookup table. +type Pointer int + +func (lp *Pointer) String() string { + return strconv.Itoa(int(*lp)) +} + +// Builder for the lookup table. +type Builder[T comparable] struct { + dict map[T]*Pointer +} + +// Add value into the table and return its pointer. +// If the value already exists, return the existing pointer. 
+func (ltb *Builder[T]) Add(v T) *Pointer { + if ltb.dict == nil { + ltb.dict = make(map[T]*Pointer) + } + + lp, ok := ltb.dict[v] + if ok { + return lp + } + + idx := Pointer(len(ltb.dict)) + lp = &idx + ltb.dict[v] = lp + return lp +} + +// Build the table by sorting the values and updating the pointers +// with the final indices into the resulting table. +func (ltb *Builder[T]) Build(cmp func(T, T) int) Table[T] { + out := slices.Collect(maps.Keys(ltb.dict)) + slices.SortFunc(out, cmp) + for i, v := range out { + ptr := ltb.dict[v] + *ptr = Pointer(i) + } + return out +} diff --git a/backend/util/lookup/lookup_test.go b/backend/util/lookup/lookup_test.go new file mode 100644 index 00000000..46a1994d --- /dev/null +++ b/backend/util/lookup/lookup_test.go @@ -0,0 +1,28 @@ +package lookup + +import ( + "cmp" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestPointer(t *testing.T) { + var ltb Builder[string] + + // Input values to be added to the lookup table. + // They are not used in order, so we'll get the pointers into the table, + // which will be replaced after the final table is built and sorted. 
+ in := []string{"hey", "hey", "ho", "alice", "z"} + ptrs := make([]*Pointer, len(in)) + for i, v := range in { + ptrs[i] = ltb.Add(v) + } + + lookup := ltb.Build(cmp.Compare) + + for i, v := range in { + ptr := ptrs[i] + require.Equal(t, v, lookup.Get(ptr)) + } +} diff --git a/backend/util/sqlite/blob_test.go b/backend/util/sqlite/blob_test.go index e45036e4..926292f2 100644 --- a/backend/util/sqlite/blob_test.go +++ b/backend/util/sqlite/blob_test.go @@ -17,6 +17,7 @@ package sqlite_test import ( "bytes" "compress/gzip" + "fmt" "io" "io/ioutil" "reflect" @@ -214,7 +215,7 @@ func TestConcurrentBlobWrites(t *testing.T) { c, err := sqlite.OpenConn("file::memory:?mode=memory", flags) if err != nil { - t.Fatal(err) + panic(err) } defer c.Close() @@ -233,7 +234,7 @@ func TestConcurrentBlobWrites(t *testing.T) { return } if n != len(b) { - t.Fatalf("n=%d, want %d (i=%d, j=%d)", n, len(b), i, j) + panic(fmt.Errorf("n=%d, want %d (i=%d, j=%d)", n, len(b), i, j)) } } }(i) @@ -386,7 +387,7 @@ func TestBlobPtrs(t *testing.T) { if err != nil { t.Fatal(err) } - b, err := ioutil.ReadAll(gzr) + b, err := io.ReadAll(gzr) if err != nil { t.Fatal(err) } diff --git a/docs/docs/hypermedia-protocol/accounts.md b/docs/docs/hypermedia-protocol/accounts.md new file mode 100644 index 00000000..219eba05 --- /dev/null +++ b/docs/docs/hypermedia-protocol/accounts.md @@ -0,0 +1,25 @@ +# Accounts + +An account is a key pair that is used to identify a person or a publisher. + +## Private Key + +## Account Mnemonics + +The Mnemonics are a 12-word combination of words, representing 132 bits of entropy (4 bytes). + +This resulting 4 byte binary value is the Seed value that can be used to derive the [Private Key](#private-key). 
+ +## Key Derivation + +Keys are derived using [SLIP-010](https://github.com/satoshilabs/slips/blob/master/slip-0010.md), with the following derivation path: `m/44'/104109'/0'` + +The resulting key pair is an [Ed25519](https://en.wikipedia.org/wiki/EdDSA#Ed25519) key pair. + + +## Account ID + +The ID of an account is the Public Key that can be derived from the [Private Key](#private-key). + +An Account ID is the string that results by encoding the Public Key with the [base58btc multibase encoding](https://github.com/multiformats/multibase). + diff --git a/docs/docs/hypermedia-protocol/binary-data.md b/docs/docs/hypermedia-protocol/binary-data.md new file mode 100644 index 00000000..84d49dd2 --- /dev/null +++ b/docs/docs/hypermedia-protocol/binary-data.md @@ -0,0 +1,21 @@ +# Raw Permanent Data + +In IPLD DAG-CBOR there is a way to encode raw data by specifying an object with `{ "/": { "bytes": ... } }` + +This is used to encode raw binary data inside [Hypermedia Permanent Data](./permanent-data.md). + +## Example + +For example a signature within a structured data block. The raw binary data is first encoded with [base64url (RFC 4648 §5)](https://datatracker.ietf.org/doc/html/rfc4648#section-5) so that it may be safely used in a string. + +Then the encoded value is inserted into a `{ "/": { "bytes": ... }}` data structure. So an example signature may look like this (in the JSON representation of the CBOR data): + +``` + "sig": { + "/": { + "bytes": "5gjnnpeM4WsfjtxZ7bVfojbK8lEG0i3ypypAORjiuLVZXk0t2V/yFsyM6o0PsEp4OVdk2/XKfW7KOthp1FYODA" + } + }, +``` + +This approach is also used to encode account public keys ([Account IDs](./accounts.md#account-id)).
\ No newline at end of file diff --git a/docs/docs/hypermedia-protocol/blob-capability.md b/docs/docs/hypermedia-protocol/blob-capability.md new file mode 100644 index 00000000..e6e6a216 --- /dev/null +++ b/docs/docs/hypermedia-protocol/blob-capability.md @@ -0,0 +1,35 @@ +# Capability Blob + +The Capability Blob is created by accounts to grant privileges for another account to access or control an additional document (or tree of documents). + +## Blob Map Field + +- `@type: 'Capability'` +- `issuer` - [Raw Account ID](./raw-account-id.md) of the Account who is granting the capability +- `delegate` - [Raw Account ID](./raw-account-id.md) of the Account who is receiving the capability +- `space` - [Raw Account ID](./raw-account-id.md) of the Account that contains the document +- `path` - String of the path which identifies this document +- `role` - String to specify the role being granted to the delegate (see [Role](#role)) +- `ts` - [Timestamp](./timestamp.md) when this Capability was created +- `noRecursive` - Boolean. `True` means the capability only applies to this document. `False` allows this capability to apply to all children paths. +- `sig` - [Hypermedia Signature](./signature.md) of the other fields, signed by the `issuer` + +## Issuer + +The Issuer Account is the Account who is giving the role to the Delegate Account. + +## Delegate + +The Delegate Account is receiving additional capabilities for the Document, according to the Role. + +## Document Address + +The `space` and `path` of the document that the capability is granting access to. + +Often, the `space` may be identical to the `issuer`, when somebody grants privileges to their own document. + +## Role + +String which describes the role that should be granted to the delegate (recipient) Account.
+ +## Recursion \ No newline at end of file diff --git a/docs/docs/hypermedia-protocol/blob-change.md b/docs/docs/hypermedia-protocol/blob-change.md new file mode 100644 index 00000000..300033cd --- /dev/null +++ b/docs/docs/hypermedia-protocol/blob-change.md @@ -0,0 +1,23 @@ +# Change Blob + +The Change Blob is used to describe how a document changes or is created. + +## Blob Map Field + +- `@type: 'Change'` +- `author` - Raw Account ID of the creator of the change +- `deps` - List of [Links](./ipld-link.md) to the previous Change Blobs +- `depth` - ??? +- `genesis` - [Link](./ipld-link.md) of the last Change Blob in the chain +- `ops` - List of [Document Operations](./document-operations.md) +- `ts` - [Timestamp](./timestamp.md) when this Change was created +- `sig` - [Hypermedia Signature](./signature.md) of the other fields, signed by the `author` + + +## Dependencies + +Previous Change Blobs of the Document. + +## Operations + +List of [Document Operations](./document-operations.md) which will modify the [Document State](./document-state.md) after interpreting the Change. \ No newline at end of file diff --git a/docs/docs/hypermedia-protocol/blob-comment.md b/docs/docs/hypermedia-protocol/blob-comment.md new file mode 100644 index 00000000..807e9c45 --- /dev/null +++ b/docs/docs/hypermedia-protocol/blob-comment.md @@ -0,0 +1,17 @@ +# Comment Blob + +A Comment Blob is content that is created from a single author. It is created in the context of a specific document, possibly in reply to another comment, which may form a tree of commentary. 
+ +## Blob Map Field + +- `@type: 'Comment'` +- `capability` - [Link](./ipld-link.md) to the [Capability Blob](./blob-capability.md) which allows the user to write this comment (if necessary) +- `author` - [Raw Account ID](./raw-account-id.md) +- `space` - [Raw Account ID](./raw-account-id.md) +- `path` +- `version` +- `threadRoot` +- `replyParent` +- `body` - BlockNode +- `ts` - [Timestamp](./timestamp.md) when this Comment was created +- `sig` - [Hypermedia Signature](./signature.md) of the other fields, signed by the `author` diff --git a/docs/docs/hypermedia-protocol/blob-ref.md b/docs/docs/hypermedia-protocol/blob-ref.md new file mode 100644 index 00000000..c89d83ae --- /dev/null +++ b/docs/docs/hypermedia-protocol/blob-ref.md @@ -0,0 +1,27 @@ +# Ref Blob + +The Ref Blob is used by a document owner or contributor to point to the most recent version, according to the author, at the time of authoring. + +## Blob Map Field + +- `@type: 'Ref'` +- `space` - The Account ID +- `path` - The path for this document, within the Space +- `genesisBlob` - The first Change in the chain of changes for this Document +- `capability` - Reference to the Capability Blob, +- `heads` - List of References to Change CIDs that represent the current Version +- `author` - [Raw Account ID](./raw-account-id.md) of the creator of the Ref +- `ts` - [Timestamp](./timestamp.md) when this Ref was created +- `sig` - [Hypermedia Signature](./signature.md) of the other fields, signed by the `author` + +## Document Addressing + +The Ref is created to show the latest version for a specific document. + +## Capability + +If the `author` is not + +## Heads + +The heads represent the Version of the document. They are [links](./ipld-link.md) that refer to a set of [Change Blobs](./blob-change.md).
\ No newline at end of file diff --git a/docs/docs/hypermedia-protocol/block-node.md b/docs/docs/hypermedia-protocol/block-node.md new file mode 100644 index 00000000..f1e294c8 --- /dev/null +++ b/docs/docs/hypermedia-protocol/block-node.md @@ -0,0 +1,7 @@ +# Block Node + +Contains the following fields: + +- `block` - The [Block content](./document-blocks.md) that will be displayed at this location +- `children` - An optional list of `BlockNodes` that are organized under this block + diff --git a/docs/docs/hypermedia-protocol/change-questions/renaming-refs.md b/docs/docs/hypermedia-protocol/change-questions/renaming-refs.md new file mode 100644 index 00000000..1a4ff5e1 --- /dev/null +++ b/docs/docs/hypermedia-protocol/change-questions/renaming-refs.md @@ -0,0 +1 @@ +Should we rename refs to claims? \ No newline at end of file diff --git a/docs/docs/hypermedia-protocol/change-questions/renaming-spaces.md b/docs/docs/hypermedia-protocol/change-questions/renaming-spaces.md new file mode 100644 index 00000000..aaae1187 --- /dev/null +++ b/docs/docs/hypermedia-protocol/change-questions/renaming-spaces.md @@ -0,0 +1,5 @@ +Do we rename spaces after hearing feedback that it is confusing? + +Do we keep the account name, even though it may be used for publishers and people? + +Naming of "Home Documents" is reportedly confusing, do we refer to it as a profile? \ No newline at end of file diff --git a/docs/docs/hypermedia-protocol/cid.md b/docs/docs/hypermedia-protocol/cid.md new file mode 100644 index 00000000..dc63e028 --- /dev/null +++ b/docs/docs/hypermedia-protocol/cid.md @@ -0,0 +1,7 @@ +# Content-Addressed Identifiers + +A CID is the addressability technique to access a file/blob of data in the [IPFS](./ipfs.md) ecosystem and Hypermedia Protocol.. Each file/blob is addressed with a CID, which is a hash of the content. + +Because each file is addressed by it's hash, you don't need to trust the peer who sends you the data. 
Once you receive the data you can check the hash, verify it matches the expected CID, and otherwise discard it. If the CID doesn't match, the peer who gave it to you made a mistake or is behaving poorly. (You can then try again, or disconnect from this peer.) + +This technique of addressing a chunk of content with its hash is called "content addressability", which is the foundational feature of IPFS and differentiates CIDs from regular IDs. \ No newline at end of file diff --git a/docs/docs/hypermedia-protocol/concepts.md b/docs/docs/hypermedia-protocol/concepts.md new file mode 100644 index 00000000..b8c53029 --- /dev/null +++ b/docs/docs/hypermedia-protocol/concepts.md @@ -0,0 +1,73 @@ +# Concepts + +## Account + +An account is used to identify a person, an organization, or an independent publisher in the Hypermedia network. Each account has a key pair: + +- Private Key - A secret piece of data which is a ED25519 key +- Public Key - The public key that corresponds to the private key. + +The public key is generally represented as a string which looks like `z6MkjPVNrNGA6dYEqkspBvfuTnAddw7hrNc5WM6dQDyuKkY3`. This is a multiformat string, and we conventionally use base58 encoding (which is why each account ID string starts with a `z`) + +Each account may be defined by a secret 12-word mnemonic which may be used recover the key pair. This is a BIP-39 series of words which may be converted into a binary data "seed". + +This seed is used as an input to a key derivation function for the ed25519 key, which uses the following derivation path: ``. This can be computed using a library such as `` or ``. + +## Document + +A document is a cohesive piece of content which contains metadata and a hierarchal list of [blocks](./document-blocks.md). + +In the [permanent data](./permanent-data.md), the document is represented as a series of [document operations](./document-operations.md) within Changes. 
After interpreting these operations, the you may arrive at the [state of a document](./document-state.md). + +## Home Document + +The Document with an empty path, which may be used as the profile or "home page" for an account. + +The Home Document is addressed with a Hypermedia URL of `hm://ACCOUNT_ID`, using the [Account ID](./accounts.md#account-id) + +## Owner + +The owner is the account who ultimately controls the document. If the Document URL is `hm://BOB_ACCOUNT_ID/my-document`, the owner is Bob. + +## Change + +A [Document Change](./blob-change.md) is a blob of data that describes how the content and metadata of a Document is changing. + +## Version + +The list of leaf Changes which represent a specific version of a document. + +The current Version is defined by the most recent set of Changes that has been designated by valid Refs. + +A Version may be specified in any [Hypermedia URL](./hypermedia-url.md#version). + +## Capability + +A capability gives one account additional priveliges in the context of another account. For example, Alice may have a document called "Best Books", which she wants to let Bob edit. So Alice may create a capability which roughly looks like this: + +``` +{ + "@type": "Capability", + "": "" +} +``` + +## Contributor + +An Account who has access to contribute to the Document. When they start signing Changes for this Document, they will become an Author. + +## Author + +An Account who has created a Change in the + +## Refs + +The pointer to the current Version of a document. Signed by the Document Owner + +Saved as [Ref Blobs](./blob-ref.md) and distributed through [IPFS](./ipfs.md) and + +## Capability + +## [Hypermedia URL](./hypermedia-url.md) + +The URL format for Hypermedia links. These links generally follow the form `hm://ACCOUNT_ID/PATH?PARAMS`. 
The params include metadata for referencing \ No newline at end of file diff --git a/docs/docs/hypermedia-protocol/document-blocks.md b/docs/docs/hypermedia-protocol/document-blocks.md new file mode 100644 index 00000000..780ae86a --- /dev/null +++ b/docs/docs/hypermedia-protocol/document-blocks.md @@ -0,0 +1,171 @@ +# Document Blocks + +The document content is a hierarchy of blocks. These are the supported block types and formatting attributes + +## Block Structure + +Every block must conform to the following attributes. Some fields are optional depending on the `type` + +- `id` - Unique ID for this block in the document +- `revision` - Revision identifier for this block. Must change whenever the block changes +- `text` - The text string that will be presented (only used for text blocks) +- `ref` - Pointer to the `hm://` Hypermedia URL, `ipfs://` url, or `https://` url, depending on the block types +- `annotations` - Map of [Text Annotations](#text-annotations) that will be used to enhance [text blocks](#text-blocks) +- `attributes` - Map of additional attributes based on the block type. Every value is a string + +## Block Types + +Each block must define the `type` field to designate one of the following supported block types: + +### Paragraph Block + +`type = "paragraph"` + +The base [Text Block](#text-blocks) which describes a paragraph in the document. + +### Heading Block + +`type = "heading"` + +A Text Block which is used to organize other content. As you might expect, it will be rendered with large text. + +Children blocks under a heading are not indented, because the large heading text provides enough visual organization for the reader. + +### Code Block + +`type = "code"` + +Embed some code that will be visible inside a document. The `text` field is used to contain the code that will be displayed. + +Note: The code block is not considered a [Text Block](#text-blocks), because the [Text Annotations](#text-annotations) are not available for formatting text. 
+ +The following attributes are available for a code block: + +- `language` - The language that will be used for syntax highlighting. Should match one of the language values from the [`highlight.js` supported language list](https://github.com/highlightjs/highlight.js/blob/main/SUPPORTED_LANGUAGES.md) + +### Math Block + +`type = "math"` + +Embed a KaTeX string into a document that will be formatted into an equation. + +The `text` field contains the KaTeX value. + +### Image Block + +`type = "image"` + +Used to embed an image into a document. The block's `ref` field should point to an `ipfs://` URL. + +Allows text with annotations, which will be used as the image caption. + +### Video Block + +`type = "video"` + +Embed a video file into a document. The block's `ref` field should point to an `ipfs://` URL. + +Should be a `.mp4` or `.mov` file type for maximum compatibility. + +### File Block + +`type = "file"` + +Used to embed a file that can be downloaded from a document. The file should be uploaded to the IPFS node and the `ref` will point to the `ipfs://` url. + +The following attributes may be used for file blocks: + +- `name` - Specify the file name that will be displayed and used when the user downloads the file. + +### Embed Block + +`type = "embed"` + +Embed content from one Hypermedia Document into another. The `ref` must be a `hm://` [Hypermedia URL](./hypermedia-url). + +The following attributes may be used for `embed` blocks: + +- `view` - card or embed + +### Web Embed Block + +`type = "web-embed"` + +Used to embed web content into a document. + +`ref` is set to a `http`/`https` URL of the content that should be embedded. + +> Note: Seed Hypermedia does not support fully web embed blocks yet. We have had experimental support for Twitter URLs for embedding tweets into documents. + +## Block Children Types + +The children type is an attribute that is available on every block, which is used to describe how children blocks are organized. 
By default, children types are logically organized under a block. This will result in a visible indentation usually, except for heading blocks where the indentation is not necessesary. + +The `childrenType` attribute may be set to the following: + +- `group` - Default children behavior (indentation, except under headings) +- `ol` - Ordered List (Numbered Items) +- `ul` - Unordered List (Bullets) + +In the case of an `ol` (Numbered List), you may also set a ??? value to specify the starting number. + +## Block Classes + +Blocks may be categorized into text blocks, file blocks, or other blocks. + +### Text Blocks + +One of: + +- [`Heading`](#heading-block) +- [`Paragraph`](#paragraph-block) +- [`Image`](#image-block) + +Text blocks should have a value for the `text` field, and `annotations` are used to further describe the text content. + +### File Blocks + +One of: + +- [`Image`](#image-block) +- [`Video`](#video-block) +- [`File`](#file-block) + +File blocks must have a `ref` that points to an `ipfs://` url + +## Text Annotations + +These are the annotations that can be used for [text blocks](#text-blocks). + +### Bold Annotation + +`type: "bold"` + +### Italic Annotation + +`type: "italic"` + + +### Underline Annotation + +`type: "underline"` + +### Strikethrough Annotation + +`type: "strike"` + +### Code Annotation + +`type: "code"` + +### Link Annotation + +`type: "link"` + +`ref` is set + +### Inline Embed Annotation + +`type: "embed"` + +`\uFFFC` \ No newline at end of file diff --git a/docs/docs/hypermedia-protocol/document-operations.md b/docs/docs/hypermedia-protocol/document-operations.md new file mode 100644 index 00000000..c579b159 --- /dev/null +++ b/docs/docs/hypermedia-protocol/document-operations.md @@ -0,0 +1,36 @@ +# Document Operations + +Each document operation takes the following format: + +```json +{ + "op": "SetMetadata", + "attributes": { + "name": "New Document Name" + } +} +``` + +The `op` is used to specify which operation is being used. 
The `attributes` will mean different things, depending on the operation. + +## Set Metadata Operation + +`"op": "SetMetadata"` + +Attributes is a Map of new [Document Metadata](./document-state.md#document-metadata) fields that will be set. + +## Move Block Operation + +`"op": "MoveBlock"` + +Attributes: + +- `"block": ` +- `"location": "?@?"` +- `"parent": ""` + +## Replace Block Operation + +`"op": "ReplaceBlock"` + +Attributes are equal to the full [Block](./document-blocks.md) value. The block ID is used to specify which block is being replaced. \ No newline at end of file diff --git a/docs/docs/hypermedia-protocol/document-state.md b/docs/docs/hypermedia-protocol/document-state.md new file mode 100644 index 00000000..09f44fce --- /dev/null +++ b/docs/docs/hypermedia-protocol/document-state.md @@ -0,0 +1,23 @@ +# Document State + +A snapshot (or version) of a document may be represented with the following state: + +- `metadata` +- `content` + +## Document Metadata + +The document `metadata` may include the following fields to help describe the purpose of the document and how it should be treated. + +- `name` - The title of this document. Or if this is the Home Document, the name of this Account +- `cover` - the `ipfs://` url of the cover image for the document. The cover image is displayed at full width at the top of the document. +- `thumbnail` - the `ipfs://` url of the square image that is used as the thumbnail for this document or account. + +## Document Content + +Every document will contain a `content` list of [Block Nodes](./block-node.md). 
+ + +## Example Document + +`TODO - show an example document value here` \ No newline at end of file diff --git a/docs/docs/hypermedia-protocol/hypermedia-url.md b/docs/docs/hypermedia-protocol/hypermedia-url.md new file mode 100644 index 00000000..20300fb9 --- /dev/null +++ b/docs/docs/hypermedia-protocol/hypermedia-url.md @@ -0,0 +1,36 @@ +# Hypermedia URLs + +These URLs are used to address content in the Hypermedia protocol. The URL generally follows the format: `hm://[ACCOUNT_ID]/[PATH]?[PARAMETERS]` + +## Protocol + +Hypermedia URLs are always prefixed with the `hm://` protocol, so they may be distinguished from other URL types such as `https://` and `ipfs://` + +## Account + +The first term of the Hypermedia URL is the only required part: an [Account ID](./accounts.md#account-id). + +A basic Hypermedia URL is `hm://z6MkjPVNrNGA6dYEqkspBvfuTnAddw7hrNc5WM6dQDyuKkY3` which refers to the [Home Document](./concepts.md#home-document) of this Account. + + +## Path + +If the path is missing, the URL will refer to the Home Document of that account. + +## Parameters + +Optional query parameters to be specified after the `?` part of the hypermedia URL. + +- `v` - Version +- `l` - Latest + +### Version + +This URL parameter will specify the lowest allowed version of the document. + +The version is defined as a list of Change CIDs, where the CIDs may be concatenated with a `.` character if there is more than one. + + +### Latest + +When this `?l` is specified, the reader should attempt to load the most recent version of the document. \ No newline at end of file diff --git a/docs/docs/hypermedia-protocol/improvements/capability-capabilities.md b/docs/docs/hypermedia-protocol/improvements/capability-capabilities.md new file mode 100644 index 00000000..24af650c --- /dev/null +++ b/docs/docs/hypermedia-protocol/improvements/capability-capabilities.md @@ -0,0 +1,3 @@ +Capabilities should be able to reference other capabilites. 
+ +If Bob is granting write permission to Alice for the document `hm://CAROL/foo`, then Bob needs to prove his own capability over Carol's document. \ No newline at end of file diff --git a/docs/docs/hypermedia-protocol/improvements/versions.md b/docs/docs/hypermedia-protocol/improvements/versions.md new file mode 100644 index 00000000..f0a116b4 --- /dev/null +++ b/docs/docs/hypermedia-protocol/improvements/versions.md @@ -0,0 +1,5 @@ +# Upgrading Versions Language + +## Refs as Versions + +## Anonymous Ref Blobs \ No newline at end of file diff --git a/docs/docs/hypermedia-protocol/index.md b/docs/docs/hypermedia-protocol/index.md new file mode 100644 index 00000000..e76eb9af --- /dev/null +++ b/docs/docs/hypermedia-protocol/index.md @@ -0,0 +1,15 @@ +# Hypermedia Protocol + +The protocol is divided into several sections: + +## [1. Permanent Data](./permanent-data.md) + +Once you have the Hypermedia data, see this section to understand how it is organized, and how you can use it to interpret the content. + +## [2. Peer-to-Peer](./peer-to-peer.md) + +How you can connect and communicate with peers (servers or other people's computers). These connections will allow you to find specific content, see whats new, and discover new or related content. + +## [3. Web + HTTP Connectivity](./web.md) + +Connect to web sites to associate domains with Hypermedia content, and connect to peers who will serve it for you. \ No newline at end of file diff --git a/docs/docs/hypermedia-protocol/ipfs.md b/docs/docs/hypermedia-protocol/ipfs.md new file mode 100644 index 00000000..9a00d9a9 --- /dev/null +++ b/docs/docs/hypermedia-protocol/ipfs.md @@ -0,0 +1,5 @@ +# IPFS + +The "Interplanetary File System" is a protocol and network which enables the synchronization and storage of immutable data. + +Each chunk of data is called a "blob", and is identified by a [CID](./cid.md). 
\ No newline at end of file diff --git a/docs/docs/hypermedia-protocol/ipld-link.md b/docs/docs/hypermedia-protocol/ipld-link.md new file mode 100644 index 00000000..d0f3b529 --- /dev/null +++ b/docs/docs/hypermedia-protocol/ipld-link.md @@ -0,0 +1,2 @@ +# IPLD Link + diff --git a/docs/docs/hypermedia-protocol/peer-to-peer.md b/docs/docs/hypermedia-protocol/peer-to-peer.md new file mode 100644 index 00000000..786a8150 --- /dev/null +++ b/docs/docs/hypermedia-protocol/peer-to-peer.md @@ -0,0 +1,11 @@ +# Peer to Peer + +Connectivity between nodes of the network, powered by [libp2p](https://libp2p.io/). + +## Peer IDs + +## Amino Distributed Hash Table + +## GRPC over libp2p + +## Syncing Protocol \ No newline at end of file diff --git a/docs/docs/hypermedia-protocol/permanent-data.md b/docs/docs/hypermedia-protocol/permanent-data.md new file mode 100644 index 00000000..32cd0815 --- /dev/null +++ b/docs/docs/hypermedia-protocol/permanent-data.md @@ -0,0 +1,33 @@ +# Permanent Data + +Hypermedia content is expressed in a graph of connected [IPFS](./ipfs.md) files/blobs. Each blob is identified by a [CID](./cid.md). + +To understand the data of our network, you should first review the [Hypermedia concepts](./concepts.md), which will be heavily referenced. + +## Structured Data + +You will start to read content via structured data blobs, which are encoded using the [IPLD DAG-CBOR encoding](https://ipld.io/specs/codecs/dag-cbor/spec/). These blobs can be converted to JSON for easier readability, and our docs will generally show you the structured data in JSON format. + +Each of our structured data blobs contain a `@type` field, which designate how they should be interpreted. + +### [Change](./blob-change.md) + +The Change Blob is used to describe how a document changes or is created. + +### [Ref](./blob-ref.md) + +The Ref Blob is used by a document owner or contributer to point to the most recent version, according to the author, at the time of authoring. 
+ +### [Capability](./blob-capability.md) + +The Capability Blob is created by accounts to grant privileges for another account to access or control an additional document (or tree of documents). + +### [Comment](./blob-comment.md) + +A Comment Blob is content that is created from a single author. It is created in the context of a specific document, possibly in reply to another comment, which may form a tree of commentary. + +## Other IPFS files + +The structured data may refer to raw IPFS files, generally as a string prefixed with `ipfs://`. This is used when images, videos, or other files are included in documents and comments. + +These files are inserted directly into IPFS and are then referenced by their CID. \ No newline at end of file diff --git a/docs/docs/hypermedia-protocol/raw-account-id.md b/docs/docs/hypermedia-protocol/raw-account-id.md new file mode 100644 index 00000000..783c99c1 --- /dev/null +++ b/docs/docs/hypermedia-protocol/raw-account-id.md @@ -0,0 +1,13 @@ +# Raw Account ID + +This is generally how Accounts are identified in the first place. + +The Account Public Key is directly embedded in [IPLD binary data](./binary-data.md). As JSON, an author may be encoded as such: + +```json + "author": { + "/": { + "bytes": "7QFJUy+fggkk/uHRKUeBRqNRrJdlc1ZCA0addDO+nwgPpA" + } + }, +``` \ No newline at end of file diff --git a/docs/docs/hypermedia-protocol/signature.md b/docs/docs/hypermedia-protocol/signature.md new file mode 100644 index 00000000..2634aed8 --- /dev/null +++ b/docs/docs/hypermedia-protocol/signature.md @@ -0,0 +1,15 @@ +# Hypermedia Signature + +The signature is the binary data that results when you sign all the fields of a Blob using the [Account Private Key](./accounts.md#private-key), resulting in an Ed25519 Signature. + +When used in a blob, it is encoded as [Binary Data](./binary-data.md).
+ +## Example Blob Signature + +```json + "sig": { + "/": { + "bytes": "3p8E1MnjnssfWAtgWH4D9dUJ6/iyKqOTxsBeOEaceYZAX5Y7E0NKyeqYW6X7qrVwB1woEtQKdH0djZ5eCnKLDw" + } + }, +``` \ No newline at end of file diff --git a/docs/docs/hypermedia-protocol/timestamp.md b/docs/docs/hypermedia-protocol/timestamp.md new file mode 100644 index 00000000..f1f20b0b --- /dev/null +++ b/docs/docs/hypermedia-protocol/timestamp.md @@ -0,0 +1,3 @@ +# Hypermedia Timestamp + +Integer in milliseconds since the Unix Epoch? \ No newline at end of file diff --git a/docs/docs/hypermedia-protocol/web.md b/docs/docs/hypermedia-protocol/web.md new file mode 100644 index 00000000..4d6b1eb3 --- /dev/null +++ b/docs/docs/hypermedia-protocol/web.md @@ -0,0 +1,7 @@ +# HTTP and Web Connectivity + +Hypermedia supports web connectivity to serve a few purposes: + +- Enable connectivity to a node that has the content of the sites +- Associate an Account with a domain +- Enable web browsers \ No newline at end of file diff --git a/frontend/apps/desktop/src/changes-list.tsx b/frontend/apps/desktop/src/changes-list.tsx index d6c7a85a..9779bf1d 100644 --- a/frontend/apps/desktop/src/changes-list.tsx +++ b/frontend/apps/desktop/src/changes-list.tsx @@ -172,9 +172,6 @@ function ChangeItem({ { destRoute && navigate(destRoute) }} diff --git a/frontend/apps/desktop/src/components/app-embeds.tsx b/frontend/apps/desktop/src/components/app-embeds.tsx index d852b3f5..f063d0be 100644 --- a/frontend/apps/desktop/src/components/app-embeds.tsx +++ b/frontend/apps/desktop/src/components/app-embeds.tsx @@ -1,5 +1,5 @@ import {useAccount_deprecated} from '@/models/accounts' -import {useSubscribedEntities, useSubscribedEntity} from '@/models/entities' +import {useSubscribedEntity} from '@/models/entities' import { DAEMON_FILE_URL, UnpackedHypermediaId, @@ -7,7 +7,6 @@ import { getDocumentTitle, hmId, packHmId, - unpackHmId, } from '@shm/shared' import { BlockContentUnknown, @@ -31,16 +30,13 @@ import {ArrowUpRightSquare} from 
'@tamagui/lucide-icons' import { ComponentProps, PropsWithChildren, - forwardRef, useEffect, useMemo, useRef, useState, } from 'react' -import {YStackProps} from 'tamagui' import {useComment} from '../models/comments' import {useNavigate} from '../utils/useNavigate' -import {EntityLinkThumbnail} from './account-link-thumbnail' function EmbedWrapper({ id, @@ -143,7 +139,6 @@ function EmbedWrapper({ : '$backgroundTransparent' } hoverStyle={{ - cursor: 'pointer', backgroundColor: isHighlight ? routeParams?.blockRef == id?.blockRef ? '$brand11' @@ -209,184 +204,6 @@ export function useSizeObserver(onRect: (rect: DOMRect) => void) { } } -const EmbedSideAnnotation = forwardRef< - HTMLDivElement, - {id: string; sidePos: 'bottom' | 'right'; disableEmbedClick?: boolean} ->(function EmbedSideAnnotation({id, sidePos, disableEmbedClick}, ref) { - const unpacked = unpackHmId(id) - - const sideStyles: YStackProps = - sidePos == 'right' - ? { - position: 'absolute', - top: 32, - right: -16, - transform: 'translateX(100%)', - } - : {} - - if (unpacked && unpacked.type == 'comment') - return ( - - ) - if (unpacked && unpacked.type != 'd') return null - const entity = useSubscribedEntity(unpacked) - const editors = useSubscribedEntities( - entity.data?.document?.authors.map((accountId) => hmId('d', accountId)) || - [], - ) - return ( - - - {getDocumentTitle(entity?.data?.document)} - - - {formattedDateMedium(entity.data?.document?.updateTime)} - - - - {editors - .map((editor) => editor.data) - .filter(Boolean) - .map( - (editorAccount, idx) => - editorAccount?.id && ( - - - - ), - )} - - - {disableEmbedClick ? 
null : ( - - Go to Document → - - )} - - ) -}) - -const CommentSideAnnotation = forwardRef(function CommentSideAnnotation( - props: {unpackedRef?: UnpackedHypermediaId; sideStyles: YStackProps}, - ref, -) { - const comment = useComment(props.unpackedRef) - - const unpackedTarget = useMemo(() => { - if (comment && comment.data?.target) { - return unpackHmId(comment.data.target) - } else { - return null - } - }, [comment]) - - const pubTarget = useSubscribedEntity(unpackedTarget) - - const editors = - pubTarget.data?.document?.authors.map((accountId) => - hmId('d', accountId), - ) || [] - - if (pubTarget.status == 'success') { - return ( - - {/* */} - - comment on{' '} - - {getDocumentTitle(pubTarget?.data?.document)} - - - {/* - {formattedDateMedium(pub.data?.document?.publishTime)} - */} - {/* */} - - {formattedDateMedium(pubTarget.data?.document?.updateTime)} - - - - {editors.map( - (editorId, idx) => - editorId?.id && ( - - - - ), - )} - - - - Go to Comment → - - - ) - } - - return null -}) - export function EmbedDocument(props: EntityComponentProps) { if (props.block.attributes?.view == 'card') { return @@ -564,7 +381,7 @@ function DocInlineEmbed(props: UnpackedHypermediaId) { const doc = useSubscribedEntity(props) return ( - {getDocumentTitle(doc.data?.document)} + @{getDocumentTitle(doc.data?.document)} ) } diff --git a/frontend/apps/desktop/src/components/avatar-form.tsx b/frontend/apps/desktop/src/components/avatar-form.tsx index 47d07207..16b2b597 100644 --- a/frontend/apps/desktop/src/components/avatar-form.tsx +++ b/frontend/apps/desktop/src/components/avatar-form.tsx @@ -87,7 +87,6 @@ export function ThumbnailForm({ top: 0, bottom: 0, zIndex: 100, - cursor: 'pointer', }} /> {emptyLabel && !url ? 
( diff --git a/frontend/apps/desktop/src/components/citations.tsx b/frontend/apps/desktop/src/components/citations.tsx index bf83e9f8..6355a6b9 100644 --- a/frontend/apps/desktop/src/components/citations.tsx +++ b/frontend/apps/desktop/src/components/citations.tsx @@ -110,7 +110,6 @@ function CommentCitationItem({mention}: {mention: Mention}) { borderRadius="$2" backgroundColor={'$backgroundTransparent'} hoverStyle={{ - cursor: 'pointer', backgroundColor: '$backgroundHover', }} margin="$4" diff --git a/frontend/apps/desktop/src/components/collaborators-panel.tsx b/frontend/apps/desktop/src/components/collaborators-panel.tsx index 662237ff..b7a090d7 100644 --- a/frontend/apps/desktop/src/components/collaborators-panel.tsx +++ b/frontend/apps/desktop/src/components/collaborators-panel.tsx @@ -404,7 +404,6 @@ export const TagInput = forwardRef( borderColor="$borderColor" alignItems="center" hoverStyle={{ - cursor: 'pointer', backgroundColor: '$color7', }} /> diff --git a/frontend/apps/desktop/src/components/cover-image.tsx b/frontend/apps/desktop/src/components/cover-image.tsx index 22ed573f..e16620e4 100644 --- a/frontend/apps/desktop/src/components/cover-image.tsx +++ b/frontend/apps/desktop/src/components/cover-image.tsx @@ -92,10 +92,9 @@ export function CoverImage({ $group-cover-hover={{opacity: 1}} gap="$2" > - + diff --git a/frontend/apps/desktop/src/components/create-account.tsx b/frontend/apps/desktop/src/components/create-account.tsx index f9d6c834..f52de9cb 100644 --- a/frontend/apps/desktop/src/components/create-account.tsx +++ b/frontend/apps/desktop/src/components/create-account.tsx @@ -126,7 +126,7 @@ export function AccountWizardDialog() { signingAccount: createdAccount.accountId, content: [], metadata: { - accountType, + // accountType, }, members: [], previousId: null, diff --git a/frontend/apps/desktop/src/components/document-list-item.tsx b/frontend/apps/desktop/src/components/document-list-item.tsx deleted file mode 100644 index 1ed550b2..00000000 
--- a/frontend/apps/desktop/src/components/document-list-item.tsx +++ /dev/null @@ -1,181 +0,0 @@ -import {useNavRoute} from '@/utils/navigation' -import {useClickNavigate} from '@/utils/useNavigate' -import { - Document, - HMAccount, - HMDocument, - NavRoute, - getDocumentTitle, - hmId, - packHmId, -} from '@shm/shared' -import { - ArrowUpRight, - Button, - ButtonText, - LinkThumbnail, - XStack, - copyTextToClipboard, -} from '@shm/ui' -import React from 'react' -import {useFavorite} from '../models/favorites' -import {useNavigate} from '../utils/useNavigate' -import {FavoriteButton} from './favoriting' -import {ListItem, TimeAccessory} from './list-item' -import {MenuItemType} from './options-dropdown' - -export const DocumentListItem = React.memo(function DocumentListItem({ - document, - debugId, - hasDraft, - menuItems = () => [], - onPointerEnter, - pathName, - openRoute, - onPathNamePress, - author, - editors, -}: { - document: HMDocument - debugId?: string - copy?: typeof copyTextToClipboard - hasDraft: Document | undefined - menuItems?: () => (MenuItemType | null)[] - pathName?: string - onPointerEnter?: () => void - openRoute: NavRoute - onPathNamePress?: () => void - author: HMAccount | string | undefined - editors: (string | HMAccount | undefined)[] -}) { - const spawn = useNavigate('spawn') - const title = getDocumentTitle(document) - const docId = document.id - const route = useNavRoute() - const docRoute = openRoute.key === 'document' ? openRoute : null - const docHmId = docRoute?.id - const docUrl = - docHmId && docRoute - ? packHmId( - hmId('d', docHmId.uid, { - version: docRoute.versionId, - }), - ) - : undefined - const favorite = useFavorite(docUrl) - - if (!docId) throw new Error('DocumentListItem requires document id') - - const navigate = useClickNavigate() - - return ( - { - navigate(openRoute, event) - }} - title={debugId ? 
`${title} - ${debugId}` : title} - onPointerEnter={onPointerEnter} - accessory={ - - {docUrl && ( - - - - )} - {hasDraft && ( - - )} - {pathName && ( - { - if (onPathNamePress) { - e.stopPropagation() - onPathNamePress() - } - }} - hoverStyle={ - onPathNamePress - ? { - textDecorationLine: 'underline', - } - : undefined - } - > - {pathName.length > 40 - ? `${pathName.slice(0, 15)}.....${pathName.slice( - pathName.length - 15, - )}` - : pathName} - - )} - - {editors && editors.length - ? editors.map((editor, idx) => { - const editorId = - typeof editor === 'string' ? editor : editor?.id - if (!editorId) return null - const account = typeof editor == 'string' ? undefined : editor - return ( - - - - ) - }) - : null} - - { - navigate(openRoute, event) - }} - /> - - } - menuItems={() => [ - ...(menuItems?.() || []), - { - key: 'spawn', - label: 'Open in New Window', - icon: ArrowUpRight, - onPress: () => { - spawn(openRoute) - }, - }, - ]} - /> - ) -}) diff --git a/frontend/apps/desktop/src/components/publish-draft-button.tsx b/frontend/apps/desktop/src/components/publish-draft-button.tsx index 7858db68..a7ada7d8 100644 --- a/frontend/apps/desktop/src/components/publish-draft-button.tsx +++ b/frontend/apps/desktop/src/components/publish-draft-button.tsx @@ -51,7 +51,6 @@ export default function PublishDraftButton() { if (!draftRoute) throw new Error('DraftPublicationButtons requires draft route') const draftId = draftRoute.id - const packedDraftId = draftId ? packHmId(draftId) : undefined const draft = useDraft(draftId) const pushOnPublish = usePushOnPublish() const prevEntity = useEntity(draftId?.type !== 'draft' ? draftId : undefined) @@ -183,6 +182,7 @@ export default function PublishDraftButton() { size="$2" onPress={handlePublish} borderRadius={0} + hoverStyle={{cursor: 'default'}} // disabled={!hassigningKeySelected} // opacity={hassigningKeySelected ? 
1 : 0.3} icon={ diff --git a/frontend/apps/desktop/src/components/publish-site.tsx b/frontend/apps/desktop/src/components/publish-site.tsx index b9cbe226..73481de9 100644 --- a/frontend/apps/desktop/src/components/publish-site.tsx +++ b/frontend/apps/desktop/src/components/publish-site.tsx @@ -1,6 +1,5 @@ import {useEntity} from '@/models/entities' import {useRemoveSite, useSiteRegistration} from '@/models/site' -import {useNavigate} from '@/utils/useNavigate' import {zodResolver} from '@hookform/resolvers/zod' import {getDocumentTitle, UnpackedHypermediaId} from '@shm/shared' import {Button} from '@shm/ui' @@ -77,7 +76,6 @@ function PublishSiteDialog({ input: UnpackedHypermediaId onClose: () => void }) { - const navigate = useNavigate() const entity = useEntity(input) const register = useSiteRegistration(input.uid) const onSubmit: SubmitHandler = (data) => { diff --git a/frontend/apps/desktop/src/components/sidebar-base.tsx b/frontend/apps/desktop/src/components/sidebar-base.tsx index cefa1bbb..c23ebac7 100644 --- a/frontend/apps/desktop/src/components/sidebar-base.tsx +++ b/frontend/apps/desktop/src/components/sidebar-base.tsx @@ -69,7 +69,6 @@ export function GenericSidebarContainer({children}: {children: ReactNode}) { opacity: 0.1, }} bottom={bottom} - cursor="pointer" onMouseEnter={ctx.onMenuHoverDelayed} onMouseLeave={ctx.onMenuHoverLeave} onPress={ctx.onMenuHover} @@ -212,12 +211,16 @@ export function SidebarItem({ textAlign="left" outlineColor="transparent" backgroundColor={active ? activeBg : '$colorTransparent'} - hoverStyle={active ? {backgroundColor: activeBg} : {}} + hoverStyle={ + active + ? {backgroundColor: activeBg, cursor: 'default'} + : {cursor: 'default'} + } + cursor="default" userSelect="none" gap="$2" group="item" color={color || '$gray12'} - cursor={active ? 
undefined : 'pointer'} title={undefined} borderRadius="$2" iconAfter={ @@ -256,7 +259,6 @@ export function SidebarItem({ overflow="hidden" fontSize="$3" color={color || '$gray12'} - cursor={active ? undefined : 'pointer'} fontWeight={bold ? 'bold' : undefined} userSelect="none" > diff --git a/frontend/apps/desktop/src/components/sidebar.tsx b/frontend/apps/desktop/src/components/sidebar.tsx index cdd52e8e..23728b8a 100644 --- a/frontend/apps/desktop/src/components/sidebar.tsx +++ b/frontend/apps/desktop/src/components/sidebar.tsx @@ -113,12 +113,7 @@ function SidebarSection({ let Icon = collapsed ? ChevronRight : ChevronDown return ( - + { diff --git a/frontend/apps/desktop/src/components/subscription.tsx b/frontend/apps/desktop/src/components/subscription.tsx index 08cbf65e..c687048c 100644 --- a/frontend/apps/desktop/src/components/subscription.tsx +++ b/frontend/apps/desktop/src/components/subscription.tsx @@ -240,7 +240,6 @@ function SubscriptionOptionButton({ height="auto" onPress={onPress} disabled={active} - cursor={active ? 
'default' : 'pointer'} borderRadius={0} pressStyle={{ backgroundColor: '$colorTransparent', diff --git a/frontend/apps/desktop/src/components/titlebar-common.tsx b/frontend/apps/desktop/src/components/titlebar-common.tsx index 63f886e6..edaefda3 100644 --- a/frontend/apps/desktop/src/components/titlebar-common.tsx +++ b/frontend/apps/desktop/src/components/titlebar-common.tsx @@ -24,8 +24,8 @@ import { createWebHMUrl, displayHostname, getDocumentTitle, + hmBlocksToEditorContent, hmId, - toHMBlock, } from '@shm/shared' import { Back, @@ -98,7 +98,7 @@ export function DocOptionsButton() { onPress: async () => { const title = doc.data?.document?.metadata.name || 'document' const blocks: HMBlockNode[] | undefined = doc.data?.document?.content - const editorBlocks = toHMBlock(blocks) + const editorBlocks = hmBlocksToEditorContent(blocks) const markdownWithFiles = await convertBlocksToMarkdown(editorBlocks) const {markdownContent, mediaFiles} = markdownWithFiles exportDocument(title, markdownContent, mediaFiles) @@ -387,7 +387,6 @@ export function NavigationButtons() { size="$2" onPress={() => dispatch({type: 'pop'})} chromeless - cursor={state.routeIndex <= 0 ? 'default' : 'pointer'} disabled={state.routeIndex <= 0} opacity={state.routeIndex <= 0 ? 0.5 : 1} icon={Back} @@ -398,11 +397,6 @@ export function NavigationButtons() { size="$2" onPress={() => dispatch({type: 'forward'})} chromeless - cursor={ - state.routeIndex >= state.routes.length - 1 - ? 'default' - : 'pointer' - } disabled={state.routeIndex >= state.routes.length - 1} opacity={state.routeIndex >= state.routes.length - 1 ? 
0.5 : 1} icon={Forward} diff --git a/frontend/apps/desktop/src/components/titlebar-search.tsx b/frontend/apps/desktop/src/components/titlebar-search.tsx index 87253ae1..323591c9 100644 --- a/frontend/apps/desktop/src/components/titlebar-search.tsx +++ b/frontend/apps/desktop/src/components/titlebar-search.tsx @@ -58,9 +58,9 @@ export function TitlebarSearch() { size="$2" className="no-window-drag" icon={Search} - hoverStyle={{ - cursor: 'text !important', - }} + // hoverStyle={{ + // cursor: 'text !important', + // }} onPress={() => { setShowLauncher((v) => !v) }} diff --git a/frontend/apps/desktop/src/components/titlebar-windows-linux.tsx b/frontend/apps/desktop/src/components/titlebar-windows-linux.tsx index 67a9bc1c..3db33f76 100644 --- a/frontend/apps/desktop/src/components/titlebar-windows-linux.tsx +++ b/frontend/apps/desktop/src/components/titlebar-windows-linux.tsx @@ -66,7 +66,7 @@ export function WindowsLinuxTitleBar({ - + diff --git a/frontend/apps/desktop/src/components/versions-panel.tsx b/frontend/apps/desktop/src/components/versions-panel.tsx index 615a9e45..8ccf7793 100644 --- a/frontend/apps/desktop/src/components/versions-panel.tsx +++ b/frontend/apps/desktop/src/components/versions-panel.tsx @@ -1,11 +1,14 @@ import {useEntity} from '@/models/entities' -import {useDocumentChanges, useVersionChanges} from '@/models/versions' +import { + HMChangeInfo, + useDocumentChanges, + useVersionChanges, +} from '@/models/versions' import {useNavigate} from '@/utils/useNavigate' import { DocumentRoute, formattedDateMedium, getAccountName, - HMChangeInfo, hmId, } from '@shm/shared' import {Button, Thumbnail, Version} from '@shm/ui' diff --git a/frontend/apps/desktop/src/components/windows-linux-titlebar.tsx b/frontend/apps/desktop/src/components/windows-linux-titlebar.tsx index d0a3caee..2efba12f 100644 --- a/frontend/apps/desktop/src/components/windows-linux-titlebar.tsx +++ b/frontend/apps/desktop/src/components/windows-linux-titlebar.tsx @@ -295,7 +295,6 @@ 
export function SystemMenu() { hoverTheme pressTheme hoverStyle={{ - cursor: 'pointer', backgroundColor: '$backgroundFocus', }} paddingHorizontal="$3" diff --git a/frontend/apps/desktop/src/editor/autocomplete.tsx b/frontend/apps/desktop/src/editor/autocomplete.tsx index 5531952b..6bbc6d2d 100644 --- a/frontend/apps/desktop/src/editor/autocomplete.tsx +++ b/frontend/apps/desktop/src/editor/autocomplete.tsx @@ -181,9 +181,12 @@ export function createAutoCompletePlugin(args: { view.state, ) - const onCreate = (ref: string, range: {from: number; to: number}) => { + const onCreate = ( + link: string, + range: {from: number; to: number}, + ) => { const node = view.state.schema.nodes[nodeName].create({ - ref, + link, }) view.dispatch( view.state.tr.replaceWith( @@ -347,20 +350,13 @@ function AutocompletePopupInner( }, Enter: () => { let [group, idx] = index - console.log( - 'enter', - group, - idx, - groups.indexOf(group) > groups.length && - idx > suggestions[group].length, - ) + if ( groups.indexOf(group) < groups.length && idx < suggestions[group].length ) { let item = suggestions[group][idx] - console.log(`== ~ item:`, item) onCreate(item.value, range) onClose() } diff --git a/frontend/apps/desktop/src/editor/blocknote/core/api/nodeConversions/nodeConversions.ts b/frontend/apps/desktop/src/editor/blocknote/core/api/nodeConversions/nodeConversions.ts index e252b865..6114d708 100644 --- a/frontend/apps/desktop/src/editor/blocknote/core/api/nodeConversions/nodeConversions.ts +++ b/frontend/apps/desktop/src/editor/blocknote/core/api/nodeConversions/nodeConversions.ts @@ -123,7 +123,7 @@ export function inlineContentToNodes( } else if (content.type == 'inline-embed') { nodes.push( schema.nodes['inline-embed'].create({ - ref: content.ref, + link: content.link, }), ) } else { @@ -164,7 +164,7 @@ export function blockToNode( } else { let nodes: Node[] = [] // Don't want hard breaks inserted as nodes in codeblock - if (block.type === 'codeBlock' && block.content.length) { + 
if (block.type === 'code-block' && block.content.length) { // @ts-ignore const textNode = schema.text(block.content[0].text) nodes.push(textNode) @@ -232,7 +232,7 @@ function contentNodeToInlineContent(contentNode: Node) { content.push({ type: node.type.name, - ref: node.attrs.ref, + link: node.attrs.link, }) currentContent = undefined @@ -410,7 +410,7 @@ export function nodeToBlock( })) { const blockSpec = blockSchema[blockInfo.contentType.name] if (!blockSpec) { - if (blockInfo.contentType.name === 'codeBlock') { + if (blockInfo.contentType.name === 'code-block') { break } else throw Error( diff --git a/frontend/apps/desktop/src/editor/blocknote/core/extensions/BlockManipulation/BlockManipulationExtension.ts b/frontend/apps/desktop/src/editor/blocknote/core/extensions/BlockManipulation/BlockManipulationExtension.ts index bb1af99d..b7df265a 100644 --- a/frontend/apps/desktop/src/editor/blocknote/core/extensions/BlockManipulation/BlockManipulationExtension.ts +++ b/frontend/apps/desktop/src/editor/blocknote/core/extensions/BlockManipulation/BlockManipulationExtension.ts @@ -67,14 +67,9 @@ export const BlockManipulationExtension = Extension.create({ (node.type.name === 'image' && // @ts-ignore event.target?.nodeName === 'IMG') || - [ - 'file', - 'embed', - 'video', - 'web-embed', - 'math', - 'equation', - ].includes(node.type.name) + ['file', 'embed', 'video', 'web-embed', 'math'].includes( + node.type.name, + ) ) { let tr = view.state.tr const selection = NodeSelection.create(view.state.doc, nodePos) @@ -129,7 +124,6 @@ export const BlockManipulationExtension = Extension.create({ 'image', 'video', 'web-embed', - 'equation', 'math', ].includes(nextNode.type.name) ) { @@ -183,7 +177,6 @@ export const BlockManipulationExtension = Extension.create({ 'embed', 'video', 'web-embed', - 'equation', 'math', ].includes( blockInfoAtSelectionStart.contentType.name, @@ -194,7 +187,6 @@ export const BlockManipulationExtension = Extension.create({ 'embed', 'video', 'web-embed', - 
'equation', 'math', ].includes(prevBlock.firstChild!.type.name) ) @@ -222,7 +214,6 @@ export const BlockManipulationExtension = Extension.create({ 'embed', 'video', 'web-embed', - 'equation', 'math', ].includes(blockInfo.contentType.name) ) { @@ -236,7 +227,6 @@ export const BlockManipulationExtension = Extension.create({ 'embed', 'video', 'web-embed', - 'equation', 'math', ].includes(prevNode.type.name) ) { @@ -257,7 +247,6 @@ export const BlockManipulationExtension = Extension.create({ 'embed', 'video', 'web-embed', - 'equation', 'math', ].includes(blockInfo.contentType.name) ) { @@ -305,7 +294,6 @@ export const BlockManipulationExtension = Extension.create({ 'embed', 'video', 'web-embed', - 'equation', 'math', ].includes(lastBlockInSelection.contentType.name) ) { @@ -321,7 +309,6 @@ export const BlockManipulationExtension = Extension.create({ 'embed', 'video', 'web-embed', - 'equation', 'math', ].includes(nextBlock.firstChild!.type.name) ) { @@ -349,7 +336,6 @@ export const BlockManipulationExtension = Extension.create({ 'embed', 'video', 'web-embed', - 'equation', 'math', ].includes(blockInfo.contentType.name) ) { @@ -363,7 +349,6 @@ export const BlockManipulationExtension = Extension.create({ 'embed', 'video', 'web-embed', - 'equation', 'math', ].includes(nextNode.type.name) ) { diff --git a/frontend/apps/desktop/src/editor/blocknote/core/extensions/Blocks/api/inlineContentTypes.ts b/frontend/apps/desktop/src/editor/blocknote/core/extensions/Blocks/api/inlineContentTypes.ts index 9111e330..4c5c7874 100644 --- a/frontend/apps/desktop/src/editor/blocknote/core/extensions/Blocks/api/inlineContentTypes.ts +++ b/frontend/apps/desktop/src/editor/blocknote/core/extensions/Blocks/api/inlineContentTypes.ts @@ -30,7 +30,7 @@ export type BNLink = { export type InlineEmbed = { type: 'inline-embed' - ref: string + link: string } export type PartialLink = Omit & { diff --git a/frontend/apps/desktop/src/editor/blocknote/core/extensions/Blocks/nodes/Block.module.css 
b/frontend/apps/desktop/src/editor/blocknote/core/extensions/Blocks/nodes/Block.module.css index 4143f862..f4febaab 100644 --- a/frontend/apps/desktop/src/editor/blocknote/core/extensions/Blocks/nodes/Block.module.css +++ b/frontend/apps/desktop/src/editor/blocknote/core/extensions/Blocks/nodes/Block.module.css @@ -301,7 +301,7 @@ NESTED BLOCKS } /* CODE BLOCK */ -.blockContent[data-content-type='codeBlock'] { +.blockContent[data-content-type='code-block'] { background-color: var(--color4); border-radius: 6px; padding: 10px 16px; @@ -311,7 +311,7 @@ NESTED BLOCKS position: 'relative'; } -.blockContent[data-content-type='codeBlock'] code { +.blockContent[data-content-type='code-block'] code { font-family: 'ui-monospace', 'SFMono-Regular', 'SF Mono', Menlo, Consolas, 'Liberation Mono', monospace; font-size: 0.85em; @@ -489,7 +489,7 @@ NESTED BLOCKS content: 'Heading'; } -.blockContent[data-content-type='codeBlock'].isEmpty.hasAnchor +.blockContent[data-content-type='code-block'].isEmpty.hasAnchor .inlineContent:before { font-family: 'Inter', diff --git a/frontend/apps/desktop/src/editor/blocknote/core/extensions/Blocks/nodes/BlockContainer.ts b/frontend/apps/desktop/src/editor/blocknote/core/extensions/Blocks/nodes/BlockContainer.ts index ddcd9f39..40b38d22 100644 --- a/frontend/apps/desktop/src/editor/blocknote/core/extensions/Blocks/nodes/BlockContainer.ts +++ b/frontend/apps/desktop/src/editor/blocknote/core/extensions/Blocks/nodes/BlockContainer.ts @@ -96,6 +96,7 @@ const PastePlugin = new Plugin({ key: PastePluginKey, props: { handlePaste: (view, event) => { + console.log('== PASTE BlockContainer PLUGIN', view.state.selection) if (!event.clipboardData) { return false } @@ -1021,14 +1022,9 @@ export const BlockContainer = Node.create<{ } if (!prevBlockInfo) return false if ( - [ - 'file', - 'embed', - 'video', - 'web-embed', - 'equation', - 'math', - ].includes(prevBlockInfo.contentType.name) || + ['file', 'embed', 'video', 'web-embed', 'math'].includes( + 
prevBlockInfo.contentType.name, + ) || (prevBlockInfo.contentType.name === 'image' && prevBlockInfo.contentNode.attrs.url.length === 0) ) { diff --git a/frontend/apps/desktop/src/editor/blocknote/core/extensions/LinkMenu/LinkMenuPlugin.ts b/frontend/apps/desktop/src/editor/blocknote/core/extensions/LinkMenu/LinkMenuPlugin.ts index e6f4796d..8765f0e9 100644 --- a/frontend/apps/desktop/src/editor/blocknote/core/extensions/LinkMenu/LinkMenuPlugin.ts +++ b/frontend/apps/desktop/src/editor/blocknote/core/extensions/LinkMenu/LinkMenuPlugin.ts @@ -14,7 +14,7 @@ export type LinkMenuState = BaseUiElementState & { // The items to display. items: LinkMenuItem[] // Pasted URL - ref: string + link: string // The index of the suggested item that's currently hovered by the keyboard. keyboardHoveredItemIndex: number } @@ -23,7 +23,7 @@ type LinkPluginState = { // True when the menu is shown, false when hidden. active: boolean // Pasted URL - ref: string + link: string // The items that should be shown in the menu. items: LinkMenuItem[] // The index of the item in the menu that's currently hovered using the keyboard. 
@@ -34,7 +34,7 @@ type LinkPluginState = { function getDefaultPluginState(): LinkPluginState { return { active: false, - ref: '', + link: '', items: [] as LinkMenuItem[], keyboardHoveredItemIndex: undefined, decorationId: undefined, @@ -111,7 +111,7 @@ export class LinkMenuView { if (this.editor.isEditable) { this.linkMenuState = { show: true, - ref: this.pluginState.ref, + link: this.pluginState.link, referencePos: decorationNode!.getBoundingClientRect(), items: this.pluginState.items, keyboardHoveredItemIndex: this.pluginState.keyboardHoveredItemIndex!, @@ -132,7 +132,7 @@ export class LinkMenuProsemirrorPlugin< > extends EventEmitter { // private linkMenuView: LinkMenuView | undefined public readonly plugin: Plugin - public readonly itemCallback: (item: MenuItem, ref: string) => void + public readonly itemCallback: (item: MenuItem, link: string) => void constructor(editor: BlockNoteEditor) { super() @@ -206,7 +206,7 @@ export const setupLinkMenu = < if (transaction.getMeta(pluginKey)?.activate) { return { active: true, - ref: ref, + link: link, items: items, keyboardHoveredItemIndex: 0, decorationId: `id_${Math.floor(Math.random() * 0xffffffff)}`, @@ -341,11 +341,11 @@ export const setupLinkMenu = < }, }, }), - itemCallback: (item: LinkMenuItem, ref: string) => { + itemCallback: (item: LinkMenuItem, link: string) => { deactivate(editor._tiptapEditor.view) editor._tiptapEditor.chain().focus().run() - item.execute(editor, ref) + item.execute(editor, link) }, } } diff --git a/frontend/apps/desktop/src/editor/blocknote/core/extensions/LinkMenu/defaultLinkMenuItems.tsx b/frontend/apps/desktop/src/editor/blocknote/core/extensions/LinkMenu/defaultLinkMenuItems.tsx index e8efe9be..536306fc 100644 --- a/frontend/apps/desktop/src/editor/blocknote/core/extensions/LinkMenu/defaultLinkMenuItems.tsx +++ b/frontend/apps/desktop/src/editor/blocknote/core/extensions/LinkMenu/defaultLinkMenuItems.tsx @@ -156,23 +156,26 @@ export function getLinkMenuItems({ name: `Mention 
"${docTitle}"`, disabled: false, icon: , - execute: (editor: BlockNoteEditor, ref: string) => { - if (isPublicGatewayLink(ref, gwUrl) || isHypermediaScheme(ref)) { - const hmId = normalizeHmId(ref, gwUrl) + execute: (editor: BlockNoteEditor, link: string) => { + if ( + isPublicGatewayLink(link, gwUrl) || + isHypermediaScheme(link) + ) { + const hmId = normalizeHmId(link, gwUrl) if (!hmId) return - ref = hmId + link = hmId } const {state, schema} = editor._tiptapEditor const {selection} = state if (!selection.empty) return const node = schema.nodes['inline-embed'].create( { - ref, + link, }, schema.text(' '), ) - insertMentionNode(editor, sourceUrl || ref, docTitle, node) + insertMentionNode(editor, sourceUrl || link, docTitle, node) }, }, ...linkMenuItems, @@ -282,20 +285,19 @@ function insertNode(editor: BlockNoteEditor, ref: string, node: Node) { function insertMentionNode( editor: BlockNoteEditor, - ref: string, + link: string, title: string, node: Node, ) { - const {state, schema, view} = editor._tiptapEditor - const {doc, selection} = state + const {state, view} = editor._tiptapEditor + const {selection} = state const {$from} = selection - const block = getBlockInfoFromPos(doc, selection.$anchor.pos) let tr = state.tr // If inserted link inline with other text (child count will be more than 1) const $pos = state.doc.resolve($from.pos) - let originalStartContent = state.doc.cut($pos.start(), $pos.pos - ref.length) + let originalStartContent = state.doc.cut($pos.start(), $pos.pos - link.length) view.dispatch( tr @@ -330,29 +332,3 @@ function insertMentionNode( // view.dispatch(tr) } - -function findClosestLinkRangeBefore(state) { - const {doc, selection} = state - const {from} = selection - let closestLinkRange = null - - // Iterate over the nodes in reverse order from the current selection position - doc.nodesBetween(0, from, (node, pos) => { - // If the node is an inline node of type link - if (node.isInline && node.type.name === 'link') { - // Get the start 
and end positions of the link node - const linkStart = pos - const linkEnd = pos + node.nodeSize - - // If this is the first link found or it's closer than the previous closest link - if ( - !closestLinkRange || - Math.abs(linkStart - from) < Math.abs(closestLinkRange.from - from) - ) { - closestLinkRange = {from: linkStart, to: linkEnd} - } - } - }) - - return closestLinkRange -} diff --git a/frontend/apps/desktop/src/editor/blocknote/core/extensions/Markdown/MarkdownExtension.ts b/frontend/apps/desktop/src/editor/blocknote/core/extensions/Markdown/MarkdownExtension.ts index e68cb3fd..dd74c14c 100644 --- a/frontend/apps/desktop/src/editor/blocknote/core/extensions/Markdown/MarkdownExtension.ts +++ b/frontend/apps/desktop/src/editor/blocknote/core/extensions/Markdown/MarkdownExtension.ts @@ -39,6 +39,10 @@ export const createMarkdownExtension = (bnEditor: BlockNoteEditor) => { new Plugin({ props: { handlePaste: (view, event, slice) => { + console.log( + '== PASTE MarkdownExtension PLUGIN', + view.state.selection, + ) const pastedText = event.clipboardData!.getData('text/plain') const pastedHtml = event.clipboardData!.getData('text/html') const hasList = diff --git a/frontend/apps/desktop/src/editor/blocknote/react/FormattingToolbar/components/DefaultButtons/ToggledStyleButton.tsx b/frontend/apps/desktop/src/editor/blocknote/react/FormattingToolbar/components/DefaultButtons/ToggledStyleButton.tsx index b1d32723..0fbbc1dd 100644 --- a/frontend/apps/desktop/src/editor/blocknote/react/FormattingToolbar/components/DefaultButtons/ToggledStyleButton.tsx +++ b/frontend/apps/desktop/src/editor/blocknote/react/FormattingToolbar/components/DefaultButtons/ToggledStyleButton.tsx @@ -22,7 +22,6 @@ const shortcuts: Record = { italic: 'Mod+I', underline: 'Mod+U', strike: 'Mod+Shift+X', - // blocknote had Mod+Shift+C here, but tiptap responds to Cmd-E with code toggle. 
code: 'Mod+E', // https://tiptap.dev/api/keyboard-shortcuts diff --git a/frontend/apps/desktop/src/editor/editor-types.ts b/frontend/apps/desktop/src/editor/editor-types.ts index d1e9ac8a..07be7697 100644 --- a/frontend/apps/desktop/src/editor/editor-types.ts +++ b/frontend/apps/desktop/src/editor/editor-types.ts @@ -21,10 +21,13 @@ export interface EditorBaseBlock { } export interface EditorBlockProps { - textAlignment?: 'left' | 'center' | 'right' + // textAlignment?: 'left' | 'center' | 'right' childrenType?: 'div' | 'ul' | 'ol' listLevel?: string start?: string + level?: number | string + ref?: string + revision?: string } export interface EditorParagraphBlock extends EditorBaseBlock { @@ -38,7 +41,7 @@ export interface EditorHeadingBlock extends EditorBaseBlock { } export interface EditorCodeBlock extends EditorBaseBlock { - type: 'codeBlock' + type: 'code-block' content: Array props: EditorBlockProps & { language?: string @@ -51,33 +54,34 @@ export interface MediaBlockProps extends EditorBlockProps { name?: string width?: number defaultOpen?: string - size?: string + size?: number } export interface EditorImageBlock extends EditorBaseBlock { type: 'image' props: MediaBlockProps - content: [] + content: Array } export interface EditorVideoBlock extends EditorBaseBlock { type: 'video' props: MediaBlockProps - content: [] + content: Array } export interface EditorFileBlock extends EditorBaseBlock { type: 'file' props: MediaBlockProps - content: [] + content: Array } export interface EditorEmbedBlock extends EditorBaseBlock { type: 'embed' props: EditorBlockProps & { - view: 'content' | 'card' + view: 'Content' | 'Card' + url: string } - content: [] + content: Array } export interface EditorMathBlock extends EditorBaseBlock { @@ -90,6 +94,7 @@ export type EditorWebEmbedBlock = EditorBaseBlock & { props: EditorBlockProps & { url?: string } + content: Array } export type EditorNostrBlock = EditorBaseBlock & { @@ -100,25 +105,25 @@ export type EditorNostrBlock = 
EditorBaseBlock & { text?: string size: number } + content: Array } -export interface EditorText extends EditorBaseInlineContent { +export interface EditorText { type: 'text' text: string + styles: EditorInlineStyles } -export interface EditorLink extends EditorBaseInlineContent { +export interface EditorLink { type: 'link' + // TODO: change to link href: string content: Array } export interface EditorInlineEmbed { type: 'inline-embed' - ref: string -} - -export interface EditorBaseInlineContent { + link: string styles: EditorInlineStyles | {} } @@ -126,6 +131,18 @@ export interface EditorInlineStyles { bold?: boolean italic?: boolean underline?: boolean - strikethrough?: boolean + strike?: boolean code?: boolean + math?: boolean } + +export type EditorAnnotationType = + | 'bold' + | 'italic' + | 'underline' + | 'strike' + | 'code' + | 'link' + | 'inline-embed' + +export type EditorBlockType = EditorBlock['type'] diff --git a/frontend/apps/desktop/src/editor/editor.css b/frontend/apps/desktop/src/editor/editor.css index 5e08a589..9ab6320e 100644 --- a/frontend/apps/desktop/src/editor/editor.css +++ b/frontend/apps/desktop/src/editor/editor.css @@ -45,7 +45,6 @@ .link:hover, .hm-link:hover { - cursor: pointer; text-decoration-thickness: 3px; } diff --git a/frontend/apps/desktop/src/editor/embed-block.tsx b/frontend/apps/desktop/src/editor/embed-block.tsx index a01aa821..5c83358f 100644 --- a/frontend/apps/desktop/src/editor/embed-block.tsx +++ b/frontend/apps/desktop/src/editor/embed-block.tsx @@ -66,7 +66,7 @@ export const EmbedBlock = createReactBlockSpec({ default: 'false', }, view: { - values: ['content', 'card'], // TODO: convert HMEmbedDisplay type to array items + values: ['content', 'card'], // TODO: convert HMEmbedView type to array items default: 'content', }, }, @@ -212,14 +212,14 @@ const display = ({ } block={{ id: block.id, - type: 'embed', + type: 'Embed', text: ' ', attributes: { childrenType: 'group', view: block.props.view, }, annotations: [], - 
ref: block.props.url, + link: block.props.url, }} depth={1} /> @@ -415,7 +415,6 @@ function EmbedControl({ onPress={handleViewSelect('content')} iconAfter={block.props.view == 'content' ? Check : null} hoverStyle={{ - cursor: 'pointer', bg: '$backgroundHover', }} /> @@ -428,7 +427,6 @@ function EmbedControl({ onPress={handleViewSelect('card')} iconAfter={block.props.view == 'card' ? Check : null} hoverStyle={{ - cursor: 'pointer', bg: '$backgroundHover', }} /> @@ -462,7 +460,6 @@ function EmbedControl({ onPress={handleVersionSelect('latest')} iconAfter={isVersionLatest ? Check : null} hoverStyle={{ - cursor: 'pointer', bg: '$backgroundHover', }} /> @@ -475,7 +472,6 @@ function EmbedControl({ onPress={handleVersionSelect('exact')} iconAfter={!isVersionLatest ? Check : null} hoverStyle={{ - cursor: 'pointer', bg: '$backgroundHover', }} /> diff --git a/frontend/apps/desktop/src/editor/handle-local-media-paste-plugin.ts b/frontend/apps/desktop/src/editor/handle-local-media-paste-plugin.ts index c2938bc8..143054ba 100644 --- a/frontend/apps/desktop/src/editor/handle-local-media-paste-plugin.ts +++ b/frontend/apps/desktop/src/editor/handle-local-media-paste-plugin.ts @@ -13,7 +13,7 @@ const handleLocalMediaPastePlugin = new Plugin({ key: new PluginKey('pm-local-media-paste'), props: { handlePaste(view, event) { - // console.log('== CURRENT SELECTION', view.state.selection) + console.log('== PASTE loval media paste PLUGIN', view.state.selection) let currentSelection = view.state.selection const items = Array.from(event.clipboardData?.items || []) if (items.length === 0) return false diff --git a/frontend/apps/desktop/src/editor/hm-formatting-toolbar.tsx b/frontend/apps/desktop/src/editor/hm-formatting-toolbar.tsx index e6e8523b..51ef8ad1 100644 --- a/frontend/apps/desktop/src/editor/hm-formatting-toolbar.tsx +++ b/frontend/apps/desktop/src/editor/hm-formatting-toolbar.tsx @@ -6,7 +6,7 @@ import { FormattingToolbarProps, } from './blocknote' -import {ToggledStyle} from 
'@shm/shared' +import {EditorToggledStyle} from '@shm/shared' import { Button, Check, @@ -42,29 +42,29 @@ const size: SizeTokens = '$3' const toggleStyles = [ { - name: 'Strong (Mod+B)', + name: 'Bold (Mod+B)', icon: Strong, - style: 'bold' as ToggledStyle, + style: 'bold' as EditorToggledStyle, }, { - name: 'Emphasis (Mod+I)', + name: 'Italic (Mod+I)', icon: Emphasis, - style: 'italic' as ToggledStyle, + style: 'italic' as EditorToggledStyle, }, { name: 'Underline (Mod+U)', icon: Underline, - style: 'underline' as ToggledStyle, + style: 'underline' as EditorToggledStyle, }, { name: 'Strikethrough (Mod+Shift+X)', icon: Strikethrough, - style: 'strike' as ToggledStyle, + style: 'strike' as EditorToggledStyle, }, { name: 'Code (Mod+E)', icon: Code, - style: 'code' as ToggledStyle, + style: 'code' as EditorToggledStyle, }, ] @@ -126,7 +126,7 @@ function ToggleStyleButton({ icon, }: { editor: BlockNoteEditor - toggleStyle: ToggledStyle + toggleStyle: EditorToggledStyle name: string icon: any }) { @@ -141,7 +141,7 @@ function ToggleStyleButton({ useEditorContentChange(editor, toggleCurrentStyle) useEditorSelectionChange(editor, toggleCurrentStyle) - function handlePress(style: ToggledStyle) { + function handlePress(style: EditorToggledStyle) { editor.focus() editor.toggleStyles({[toggleStyle]: true}) } diff --git a/frontend/apps/desktop/src/editor/math.tsx b/frontend/apps/desktop/src/editor/math.tsx index 2ee55cfd..8ca4a0fc 100644 --- a/frontend/apps/desktop/src/editor/math.tsx +++ b/frontend/apps/desktop/src/editor/math.tsx @@ -14,7 +14,7 @@ import { } from './blocknote' import {HMBlockSchema} from './schema' -export const MathBlock = (type: 'equation' | 'math') => +export const MathBlock = (type: 'math') => createReactBlockSpec({ type, propSchema: { @@ -142,7 +142,6 @@ const Render = ( paddingVertical="10px" paddingHorizontal="16px" position="relative" - hoverStyle={{cursor: `${!opened ? 
'pointer' : ''}`}} userSelect="none" > @@ -182,7 +181,6 @@ const Render = ( 'embed', 'video', 'web-embed', - 'equation', 'math', ].includes(prevNode.type.name) ) { @@ -218,7 +216,6 @@ const Render = ( 'embed', 'video', 'web-embed', - 'equation', 'math', ].includes(nextNode.type.name) ) { diff --git a/frontend/apps/desktop/src/editor/media-render.tsx b/frontend/apps/desktop/src/editor/media-render.tsx index e41543f6..817a7ed1 100644 --- a/frontend/apps/desktop/src/editor/media-render.tsx +++ b/frontend/apps/desktop/src/editor/media-render.tsx @@ -494,7 +494,6 @@ function MediaForm({ fileName.color !== 'red' ? { backgroundColor: '$color5', - cursor: 'pointer', } : {cursor: 'auto'} } @@ -552,7 +551,6 @@ function MediaForm({ fileName.color !== 'red' ? { backgroundColor: '$color5', - cursor: 'pointer', } : {cursor: 'auto'} } @@ -596,7 +594,6 @@ function MediaForm({ justifyContent="center" hoverStyle={{ backgroundColor: '$borderColorHover', - cursor: 'pointer', }} gap={3} > diff --git a/frontend/apps/desktop/src/editor/mentions-plugin.tsx b/frontend/apps/desktop/src/editor/mentions-plugin.tsx index 1e820826..5d50357f 100644 --- a/frontend/apps/desktop/src/editor/mentions-plugin.tsx +++ b/frontend/apps/desktop/src/editor/mentions-plugin.tsx @@ -51,7 +51,9 @@ export function createInlineEmbedNode(bnEditor: any) { getAttrs: (dom) => { if (dom instanceof HTMLElement) { var value = dom.getAttribute('data-inline-embed') - return {ref: value} + + console.log(`== node ~ parseHTML ~ value:`, value) + return {link: value} } return false }, @@ -60,7 +62,7 @@ export function createInlineEmbedNode(bnEditor: any) { }, addAttributes() { return { - ref: { + link: { default: '', }, } @@ -77,9 +79,9 @@ function InlineEmbedNodeComponent(props: any) { return ( - + ) } @@ -117,6 +119,10 @@ export function MentionText(props) { return ( Search @@ -475,7 +474,6 @@ function NostrForm({ borderBottomWidth={tabState == 'manual' ? 
'$1' : '$0'} hoverStyle={{ backgroundColor: '$borderColorHover', - cursor: 'pointer', }} > Manual @@ -524,7 +522,6 @@ function NostrForm({ color="$color1" hoverStyle={{ backgroundColor: '$color11', - cursor: 'pointer', }} > SEARCH @@ -582,7 +579,6 @@ function NostrForm({ color="$color1" hoverStyle={{ backgroundColor: '$color11', - cursor: 'pointer', }} > EMBED diff --git a/frontend/apps/desktop/src/editor/schema.ts b/frontend/apps/desktop/src/editor/schema.ts index d527d322..73a4a2c4 100644 --- a/frontend/apps/desktop/src/editor/schema.ts +++ b/frontend/apps/desktop/src/editor/schema.ts @@ -26,7 +26,7 @@ export const hmBlockSchema: BlockSchema = { node: HMHeadingBlockContent, }, image: ImageBlock, - codeBlock: { + ['code-block']: { propSchema: { ...defaultProps, language: {default: ''}, @@ -43,7 +43,6 @@ export const hmBlockSchema: BlockSchema = { file: FileBlock, nostr: NostrBlock, ['web-embed']: WebEmbed, - equation: MathBlock('equation'), // DEPRECATE math: MathBlock('math'), } diff --git a/frontend/apps/desktop/src/editor/tiptap-extension-code-block/code-block-view.tsx b/frontend/apps/desktop/src/editor/tiptap-extension-code-block/code-block-view.tsx index 832eb10e..67151969 100644 --- a/frontend/apps/desktop/src/editor/tiptap-extension-code-block/code-block-view.tsx +++ b/frontend/apps/desktop/src/editor/tiptap-extension-code-block/code-block-view.tsx @@ -40,13 +40,7 @@ export const CodeBlockView = ({ $group-item-hover={{opacity: 1}} >