diff --git a/gnovm/cmd/gno/doc.go b/gnovm/cmd/gno/doc.go new file mode 100644 index 00000000000..c54e289cd67 --- /dev/null +++ b/gnovm/cmd/gno/doc.go @@ -0,0 +1,94 @@ +package main + +import ( + "context" + "flag" + "path/filepath" + + "github.com/gnolang/gno/gnovm/pkg/doc" + "github.com/gnolang/gno/tm2/pkg/commands" +) + +type docCfg struct { + all bool + src bool + unexported bool + short bool + rootDir string +} + +func newDocCmd(io *commands.IO) *commands.Command { + c := &docCfg{} + return commands.NewCommand( + commands.Metadata{ + Name: "doc", + ShortUsage: "doc [flags] <pkgsym>", + ShortHelp: "get documentation for the specified package or symbol (type, function, method, or variable/constant).", + }, + c, + func(_ context.Context, args []string) error { + return execDoc(c, args, io) + }, + ) +} + +func (c *docCfg) RegisterFlags(fs *flag.FlagSet) { + fs.BoolVar( + &c.all, + "all", + false, + "show documentation for all symbols in package", + ) + + fs.BoolVar( + &c.src, + "src", + false, + "show source code for symbols", + ) + + fs.BoolVar( + &c.unexported, + "u", + false, + "show unexported symbols as well as exported", + ) + + fs.BoolVar( + &c.short, + "short", + false, + "show a one line representation for each symbol", + ) + + fs.StringVar( + &c.rootDir, + "root-dir", + "", + "clone location of github.com/gnolang/gno (gnodev tries to guess it)", + ) +} + +func execDoc(cfg *docCfg, args []string, io *commands.IO) error { + // guess opts.RootDir + if cfg.rootDir == "" { + cfg.rootDir = guessRootDir() + } + dirs := []string{filepath.Join(cfg.rootDir, "gnovm/stdlibs"), filepath.Join(cfg.rootDir, "examples")} + res, err := doc.ResolveDocumentable(dirs, args, cfg.unexported) + if res == nil { + return err + } + if err != nil { + io.Printfln("warning: error parsing some candidate packages:\n%v", err) + } + return res.WriteDocumentation( + io.Out, + &doc.WriteDocumentationOptions{ + ShowAll: cfg.all, + Source: cfg.src, + Unexported: cfg.unexported, + Short: cfg.short, + }, + 
) +} diff --git a/gnovm/cmd/gno/doc_test.go b/gnovm/cmd/gno/doc_test.go new file mode 100644 index 00000000000..3eb90e2a329 --- /dev/null +++ b/gnovm/cmd/gno/doc_test.go @@ -0,0 +1,29 @@ +package main + +import "testing" + +func TestGnoDoc(t *testing.T) { + tc := []testMainCase{ + { + args: []string{"doc", "io.Writer"}, + stdoutShouldContain: "Writer is the interface that wraps", + }, + { + args: []string{"doc", "avl"}, + stdoutShouldContain: "func NewNode", + }, + { + args: []string{"doc", "-u", "avl.Node"}, + stdoutShouldContain: "node *Node", + }, + { + args: []string{"doc", "dkfdkfkdfjkdfj"}, + errShouldContain: "package not found", + }, + { + args: []string{"doc", "There.Are.Too.Many.Dots"}, + errShouldContain: "invalid arguments", + }, + } + testMainCaseRun(t, tc) +} diff --git a/gnovm/cmd/gno/main.go b/gnovm/cmd/gno/main.go index 8fa33a91d83..9f0a55bf9cc 100644 --- a/gnovm/cmd/gno/main.go +++ b/gnovm/cmd/gno/main.go @@ -35,6 +35,7 @@ func newGnodevCmd(io *commands.IO) *commands.Command { newTestCmd(io), newModCmd(io), newReplCmd(), + newDocCmd(io), // fmt -- gofmt // clean // graph @@ -43,7 +44,6 @@ func newGnodevCmd(io *commands.IO) *commands.Command { // render -- call render()? // publish/release // generate - // doc -- godoc // "vm" -- starts an in-memory chain that can be interacted with? // bug -- start a bug report // version -- show gnodev, golang versions diff --git a/gnovm/pkg/doc/dirs.go b/gnovm/pkg/doc/dirs.go new file mode 100644 index 00000000000..c6f90d167e4 --- /dev/null +++ b/gnovm/pkg/doc/dirs.go @@ -0,0 +1,163 @@ +// Mostly copied from go source at tip, commit d922c0a. +// +// Copyright 2015 The Go Authors. All rights reserved. + +package doc + +import ( + "log" + "os" + "path/filepath" + "sort" + "strings" +) + +// A bfsDir describes a directory holding code by specifying +// the expected import path and the file system directory. 
+type bfsDir struct { + importPath string // import path for that dir + dir string // file system directory +} + +// dirs is a structure for scanning the directory tree. +// Its Next method returns the next Go source directory it finds. +// Although it can be used to scan the tree multiple times, it +// only walks the tree once, caching the data it finds. +type bfsDirs struct { + scan chan bfsDir // Directories generated by walk. + hist []bfsDir // History of reported Dirs. + offset int // Counter for Next. +} + +// newDirs begins scanning the given stdlibs directory. +func newDirs(dirs ...string) *bfsDirs { + d := &bfsDirs{ + hist: make([]bfsDir, 0, 256), + scan: make(chan bfsDir), + } + go d.walk(dirs) + return d +} + +// Reset puts the scan back at the beginning. +func (d *bfsDirs) Reset() { + d.offset = 0 +} + +// Next returns the next directory in the scan. The boolean +// is false when the scan is done. +func (d *bfsDirs) Next() (bfsDir, bool) { + if d.offset < len(d.hist) { + dir := d.hist[d.offset] + d.offset++ + return dir, true + } + dir, ok := <-d.scan + if !ok { + return bfsDir{}, false + } + d.hist = append(d.hist, dir) + d.offset++ + return dir, ok +} + +// walk walks the trees in the given roots. +func (d *bfsDirs) walk(roots []string) { + for _, root := range roots { + d.bfsWalkRoot(root) + } + close(d.scan) +} + +// bfsWalkRoot walks a single directory hierarchy in breadth-first lexical order. +// Each Go source directory it finds is delivered on d.scan. +func (d *bfsDirs) bfsWalkRoot(root string) { + root = filepath.Clean(root) + + // this is the queue of directories to examine in this pass. + this := []string{} + // next is the queue of directories to examine in the next pass. 
+ next := []string{root} + + for len(next) > 0 { + this, next = next, this[:0] + for _, dir := range this { + fd, err := os.Open(dir) + if err != nil { + log.Print(err) + continue + } + entries, err := fd.Readdir(0) + fd.Close() + if err != nil { + log.Print(err) + continue + } + hasGnoFiles := false + for _, entry := range entries { + name := entry.Name() + // For plain files, remember if this directory contains any .gno + // source files, but ignore them otherwise. + if !entry.IsDir() { + if !hasGnoFiles && strings.HasSuffix(name, ".gno") { + hasGnoFiles = true + } + continue + } + // Entry is a directory. + + // Ignore same directories ignored by the go tool. + if name[0] == '.' || name[0] == '_' || name == "testdata" { + continue + } + // Remember this (fully qualified) directory for the next pass. + next = append(next, filepath.Join(dir, name)) + } + if hasGnoFiles { + // It's a candidate. + var importPath string + if len(dir) > len(root) { + importPath = filepath.ToSlash(dir[len(root)+1:]) + } + d.scan <- bfsDir{importPath, dir} + } + } + } +} + +// findPackage finds a package iterating over d where the import path has +// name as a suffix (which may be a package name or a fully-qualified path). +// returns a list of possible directories. If a directory's import path matched +// exactly, it will be returned as first. 
+func (d *bfsDirs) findPackage(name string) []bfsDir { + d.Reset() + candidates := make([]bfsDir, 0, 4) + for dir, ok := d.Next(); ok; dir, ok = d.Next() { + // want either exact matches or suffixes + if dir.importPath == name || strings.HasSuffix(dir.importPath, "/"+name) { + candidates = append(candidates, dir) + } + } + sort.Slice(candidates, func(i, j int) bool { + // prefer exact matches with name + if candidates[i].importPath == name { + return true + } else if candidates[j].importPath == name { + return false + } + return candidates[i].importPath < candidates[j].importPath + }) + return candidates +} + +// findDir determines if the given absdir is present in the Dirs. +// If not, the nil slice is returned. It returns always at most one dir. +func (d *bfsDirs) findDir(absdir string) []bfsDir { + d.Reset() + for dir, ok := d.Next(); ok; dir, ok = d.Next() { + if dir.dir == absdir { + return []bfsDir{dir} + } + } + return nil +} diff --git a/gnovm/pkg/doc/dirs_test.go b/gnovm/pkg/doc/dirs_test.go new file mode 100644 index 00000000000..a7c4926a8c8 --- /dev/null +++ b/gnovm/pkg/doc/dirs_test.go @@ -0,0 +1,74 @@ +package doc + +import ( + "path/filepath" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func tNewDirs(t *testing.T) (string, *bfsDirs) { + t.Helper() + p, err := filepath.Abs("./testdata/dirs") + require.NoError(t, err) + return p, newDirs(p) +} + +func TestDirs_findPackage(t *testing.T) { + abs, d := tNewDirs(t) + tt := []struct { + name string + res []bfsDir + }{ + {"rand", []bfsDir{ + {importPath: "rand", dir: filepath.Join(abs, "rand")}, + {importPath: "crypto/rand", dir: filepath.Join(abs, "crypto/rand")}, + {importPath: "math/rand", dir: filepath.Join(abs, "math/rand")}, + }}, + {"crypto/rand", []bfsDir{ + {importPath: "crypto/rand", dir: filepath.Join(abs, "crypto/rand")}, + }}, + {"math", []bfsDir{ + {importPath: "math", dir: filepath.Join(abs, "math")}, + }}, + {"ath", []bfsDir{}}, 
+ {"/math", []bfsDir{}}, + {"", []bfsDir{}}, + } + for _, tc := range tt { + tc := tc + t.Run("name_"+strings.Replace(tc.name, "/", "_", -1), func(t *testing.T) { + res := d.findPackage(tc.name) + assert.Equal(t, tc.res, res, "dirs returned should be the equal") + }) + } +} + +func TestDirs_findDir(t *testing.T) { + abs, d := tNewDirs(t) + tt := []struct { + name string + in string + res []bfsDir + }{ + {"rand", filepath.Join(abs, "rand"), []bfsDir{ + {importPath: "rand", dir: filepath.Join(abs, "rand")}, + }}, + {"crypto/rand", filepath.Join(abs, "crypto/rand"), []bfsDir{ + {importPath: "crypto/rand", dir: filepath.Join(abs, "crypto/rand")}, + }}, + // ignored (dir name testdata), so should not return anything. + {"crypto/testdata/rand", filepath.Join(abs, "crypto/testdata/rand"), nil}, + {"xx", filepath.Join(abs, "xx"), nil}, + {"xx2", "/xx2", nil}, + } + for _, tc := range tt { + tc := tc + t.Run(strings.Replace(tc.name, "/", "_", -1), func(t *testing.T) { + res := d.findDir(tc.in) + assert.Equal(t, tc.res, res, "dirs returned should be the equal") + }) + } +} diff --git a/gnovm/pkg/doc/doc.go b/gnovm/pkg/doc/doc.go new file mode 100644 index 00000000000..cecd97f53d9 --- /dev/null +++ b/gnovm/pkg/doc/doc.go @@ -0,0 +1,329 @@ +// Package doc implements support for documentation of Gno packages and realms, +// in a similar fashion to `go doc`. +// As a reference, the [official implementation] for `go doc` is used. +// +// [official implementation]: https://github.com/golang/go/tree/90dde5dec1126ddf2236730ec57511ced56a512d/src/cmd/doc +package doc + +import ( + "errors" + "fmt" + "go/ast" + "go/doc" + "go/token" + "io" + "log" + "os" + "path/filepath" + "strings" + + "go.uber.org/multierr" +) + +// WriteDocumentationOptions represents the possible options when requesting +// documentation through Documentable. +type WriteDocumentationOptions struct { + // ShowAll shows all symbols when displaying documentation about a package. 
+ ShowAll bool + // Source shows the source code when documenting a symbol. + Source bool + // Unexported shows unexported symbols as well as exported. + Unexported bool + // Short shows a one-line representation for each symbol. + Short bool + + w io.Writer +} + +// Documentable is a package, symbol, or accessible which can be documented. +type Documentable interface { + WriteDocumentation(w io.Writer, opts *WriteDocumentationOptions) error +} + +// static implementation check +var _ Documentable = (*documentable)(nil) + +type documentable struct { + bfsDir + symbol string + accessible string + pkgData *pkgData +} + +func (d *documentable) WriteDocumentation(w io.Writer, o *WriteDocumentationOptions) error { + if o == nil { + o = &WriteDocumentationOptions{} + } + o.w = w + + var err error + // pkgData may already be initialised if we already had to look to see + // if it had the symbol we wanted; otherwise initialise it now. + if d.pkgData == nil { + d.pkgData, err = newPkgData(d.bfsDir, o.Unexported) + if err != nil { + return err + } + } + + astpkg, pkg, err := d.pkgData.docPackage(o) + if err != nil { + return err + } + + // copied from go source - map vars, constants and constructors to their respective types. + typedValue := make(map[*doc.Value]bool) + constructor := make(map[*doc.Func]bool) + for _, typ := range pkg.Types { + pkg.Consts = append(pkg.Consts, typ.Consts...) + pkg.Vars = append(pkg.Vars, typ.Vars...) + pkg.Funcs = append(pkg.Funcs, typ.Funcs...) + if !o.Unexported && !token.IsExported(typ.Name) { + continue + } + for _, value := range typ.Consts { + typedValue[value] = true + } + for _, value := range typ.Vars { + typedValue[value] = true + } + for _, fun := range typ.Funcs { + // We don't count it as a constructor bound to the type + // if the type itself is not exported. 
+ constructor[fun] = true + } + } + + pp := &pkgPrinter{ + name: d.pkgData.name, + pkg: astpkg, + file: ast.MergePackageFiles(astpkg, 0), + doc: pkg, + typedValue: typedValue, + constructor: constructor, + fs: d.pkgData.fset, + opt: o, + importPath: d.importPath, + } + pp.buf.pkg = pp + + return d.output(pp) +} + +func (d *documentable) output(pp *pkgPrinter) (err error) { + defer func() { + // handle the case of errFatal. + // this will have been generated by pkg.Fatalf, so get the error + // from pp.err. + e := recover() + ee, ok := e.(error) + if e != nil && !(ok && errors.Is(ee, errFatal)) { + panic(e) + } + + flushErr := pp.flush() + if pp.err != nil { + err = pp.err + } + if flushErr != nil { + err = multierr.Combine(err, fmt.Errorf("error flushing: %w", flushErr)) + } + }() + + switch { + case d.symbol == "" && d.accessible == "": + if pp.opt.ShowAll { + pp.allDoc() + return + } + pp.packageDoc() + case d.symbol != "" && d.accessible == "": + pp.symbolDoc(d.symbol) + default: // both non-empty + if pp.methodDoc(d.symbol, d.accessible) { + return + } + if pp.fieldDoc(d.symbol, d.accessible) { + return + } + } + + return +} + +// set as a variable so it can be changed by testing. +var fpAbs = filepath.Abs + +// ResolveDocumentable returns a Documentable from the given arguments. +// Refer to the documentation of gnodev doc for the formats accepted (in general +// the same as the go doc command). +// An error may be returned even if documentation was resolved in case some +// packages in dirs could not be parsed correctly. +func ResolveDocumentable(dirs []string, args []string, unexported bool) (Documentable, error) { + d := newDirs(dirs...) 
+ + parsed, ok := parseArgs(args) + if !ok { + return nil, fmt.Errorf("commands/doc: invalid arguments: %v", args) + } + return resolveDocumentable(d, parsed, unexported) +} + +func resolveDocumentable(dirs *bfsDirs, parsed docArgs, unexported bool) (Documentable, error) { + var candidates []bfsDir + + // if we have a candidate package name, search dirs for a dir that matches it. + // prefer directories whose import path match precisely the package + if s, err := os.Stat(parsed.pkg); err == nil && s.IsDir() { + // expand to full path - fpAbs is filepath.Abs except in test + absVal, err := fpAbs(parsed.pkg) + if err == nil { + candidates = dirs.findDir(absVal) + } else { + // this is very rare - generally syscall failure or os.Getwd failing + log.Printf("warning: could not determine abs path: %v", err) + } + } else if err != nil && !os.IsNotExist(err) { + // also quite rare, generally will be permission errors (in reading cwd) + log.Printf("warning: tried showing documentation for directory %q, error: %v", parsed.pkg, err) + } + // arg is either not a dir, or if it matched a local dir it was not + // valid (ie. not scanned by dirs). try parsing as a package + if len(candidates) == 0 { + candidates = dirs.findPackage(parsed.pkg) + } + + if len(candidates) == 0 { + // there are no candidates. + // if this is ambiguous, remove ambiguity and try parsing args using pkg as the symbol. + if !parsed.pkgAmbiguous { + return nil, fmt.Errorf("commands/doc: package not found: %q (note: local packages are not yet supported)", parsed.pkg) + } + parsed = docArgs{pkg: ".", sym: parsed.pkg, acc: parsed.sym} + return resolveDocumentable(dirs, parsed, unexported) + } + // we wanted documentabfsDirn about a package, and we found one! + if parsed.sym == "" { + return &documentable{bfsDir: candidates[0]}, nil + } + + // we also have a symbol, and maybe accessible. 
+ // search for the symbol through the candidates + + doc := &documentable{ + symbol: parsed.sym, + accessible: parsed.acc, + } + + var matchFunc func(s symbolData) bool + if parsed.acc == "" { + matchFunc = func(s symbolData) bool { + return (s.accessible == "" && symbolMatch(parsed.sym, s.symbol)) || + (s.typ == symbolDataMethod && symbolMatch(parsed.sym, s.accessible)) + } + } else { + matchFunc = func(s symbolData) bool { + return symbolMatch(parsed.sym, s.symbol) && symbolMatch(parsed.acc, s.accessible) + } + } + + var errs []error + for _, candidate := range candidates { + pd, err := newPkgData(candidate, unexported) + if err != nil { + // report errors as warning, but don't fail because of them + // likely ast/parsing errors. + errs = append(errs, err) + continue + } + for _, sym := range pd.symbols { + if !matchFunc(sym) { + continue + } + doc.bfsDir = candidate + doc.pkgData = pd + // match found. return this as documentable. + return doc, multierr.Combine(errs...) + } + } + return nil, multierr.Append( + fmt.Errorf("commands/doc: could not resolve arguments: %+v", parsed), + multierr.Combine(errs...), + ) +} + +// docArgs represents the parsed args of the doc command. +// sym could be a symbol, but the accessibles of types should also be shown if they match sym. +type docArgs struct { + pkg string // always set + sym string + acc string // short for "accessible". only set if sym is also set + + // pkg could be a symbol in the local dir. + // if that is the case, and sym != "", then sym, acc = pkg, sym + pkgAmbiguous bool +} + +func parseArgs(args []string) (docArgs, bool) { + switch len(args) { + case 0: + return docArgs{pkg: "."}, true + case 1: + // allowed syntaxes (acc is method or field, [] marks optional): + // + // [.][.] + // [.][.] + // if the (part) argument contains a slash, then it is most certainly + // a pkg. + // note: pkg can be a relative path. this is mostly problematic for ".." and + // ".". so we count full stops from the last slash. 
+ slash := strings.LastIndexByte(args[0], '/') + if args[0] == "." || args[0] == ".." || + (slash != -1 && args[0][slash+1:] == "..") { + // special handling for common ., .. and /.. + // these will generally work poorly if you try to use the one-argument + // syntax to access a symbol/accessible. + return docArgs{pkg: args[0]}, true + } + switch strings.Count(args[0][slash+1:], ".") { + case 0: + if slash != -1 { + return docArgs{pkg: args[0]}, true + } + return docArgs{pkg: args[0], pkgAmbiguous: true}, true + case 1: + pos := strings.IndexByte(args[0][slash+1:], '.') + slash + 1 + if slash != -1 { + return docArgs{pkg: args[0][:pos], sym: args[0][pos+1:]}, true + } + if token.IsExported(args[0]) { + // See rationale here: + // https://github.com/golang/go/blob/90dde5dec1126ddf2236730ec57511ced56a512d/src/cmd/doc/main.go#L265 + return docArgs{pkg: ".", sym: args[0][:pos], acc: args[0][pos+1:]}, true + } + return docArgs{pkg: args[0][:pos], sym: args[0][pos+1:], pkgAmbiguous: true}, true + case 2: + // pkg.sym.acc + parts := strings.Split(args[0][slash+1:], ".") + return docArgs{ + pkg: args[0][:slash+1] + parts[0], + sym: parts[1], + acc: parts[2], + }, true + default: + return docArgs{}, false + } + case 2: + switch strings.Count(args[1], ".") { + case 0: + return docArgs{pkg: args[0], sym: args[1]}, true + case 1: + pos := strings.IndexByte(args[1], '.') + return docArgs{pkg: args[0], sym: args[1][:pos], acc: args[1][pos+1:]}, true + default: + return docArgs{}, false + } + default: + return docArgs{}, false + } +} diff --git a/gnovm/pkg/doc/doc_test.go b/gnovm/pkg/doc/doc_test.go new file mode 100644 index 00000000000..1cccb4106f7 --- /dev/null +++ b/gnovm/pkg/doc/doc_test.go @@ -0,0 +1,242 @@ +package doc + +import ( + "bytes" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestResolveDocumentable(t *testing.T) { + p, err := os.Getwd() + require.NoError(t, err) + path 
:= func(s string) string { return filepath.Join(p, "testdata/integ", s) } + dirs := newDirs(path("")) + getDir := func(p string) bfsDir { return dirs.findDir(path(p))[0] } + pdata := func(p string, unexp bool) *pkgData { + pd, err := newPkgData(getDir(p), unexp) + require.NoError(t, err) + return pd + } + + tt := []struct { + name string + args []string + unexp bool + expect Documentable + errContains string + }{ + {"package", []string{"crypto/rand"}, false, &documentable{bfsDir: getDir("crypto/rand")}, ""}, + {"dir", []string{"./testdata/integ/crypto/rand"}, false, &documentable{bfsDir: getDir("crypto/rand")}, ""}, + {"dirAbs", []string{path("crypto/rand")}, false, &documentable{bfsDir: getDir("crypto/rand")}, ""}, + // test_notapkg exists in local dir and also path("test_notapkg"). + // ResolveDocumentable should first try local dir, and seeing as it is not a valid dir, try searching it as a package. + {"dirLocalMisleading", []string{"test_notapkg"}, false, &documentable{bfsDir: getDir("test_notapkg")}, ""}, + { + "normalSymbol", + []string{"crypto/rand.Flag"}, + false, + &documentable{bfsDir: getDir("crypto/rand"), symbol: "Flag", pkgData: pdata("crypto/rand", false)}, "", + }, + { + "normalAccessible", + []string{"crypto/rand.Generate"}, + false, + &documentable{bfsDir: getDir("crypto/rand"), symbol: "Generate", pkgData: pdata("crypto/rand", false)}, "", + }, + { + "normalSymbolUnexp", + []string{"crypto/rand.unexp"}, + true, + &documentable{bfsDir: getDir("crypto/rand"), symbol: "unexp", pkgData: pdata("crypto/rand", true)}, "", + }, + { + "normalAccessibleFull", + []string{"crypto/rand.Rand.Name"}, + false, + &documentable{bfsDir: getDir("crypto/rand"), symbol: "Rand", accessible: "Name", pkgData: pdata("crypto/rand", false)}, "", + }, + { + "disambiguate", + []string{"rand.Flag"}, + false, + &documentable{bfsDir: getDir("crypto/rand"), symbol: "Flag", pkgData: pdata("crypto/rand", false)}, "", + }, + { + "disambiguate2", + []string{"rand.Crypto"}, + false, + 
&documentable{bfsDir: getDir("crypto/rand"), symbol: "Crypto", pkgData: pdata("crypto/rand", false)}, "", + }, + { + "disambiguate3", + []string{"rand.Normal"}, + false, + &documentable{bfsDir: getDir("rand"), symbol: "Normal", pkgData: pdata("rand", false)}, "", + }, + { + "disambiguate4", // just "rand" should use the directory that matches it exactly. + []string{"rand"}, + false, + &documentable{bfsDir: getDir("rand")}, "", + }, + { + "wdSymbol", + []string{"WdConst"}, + false, + &documentable{bfsDir: getDir("wd"), symbol: "WdConst", pkgData: pdata("wd", false)}, "", + }, + + {"errInvalidArgs", []string{"1", "2", "3"}, false, nil, "invalid arguments: [1 2 3]"}, + {"errNoCandidates", []string{"math", "Big"}, false, nil, `package not found: "math"`}, + {"errNoCandidates2", []string{"LocalSymbol"}, false, nil, `local packages are not yet supported`}, + {"errNoCandidates3", []string{"Symbol.Accessible"}, false, nil, `local packages are not yet supported`}, + {"errNonExisting", []string{"rand.NotExisting"}, false, nil, `could not resolve arguments`}, + {"errUnexp", []string{"crypto/rand.unexp"}, false, nil, "could not resolve arguments"}, + {"errDirNotapkg", []string{"./test_notapkg"}, false, nil, `package not found: "./test_notapkg"`}, + } + + for _, tc := range tt { + tc := tc + t.Run(tc.name, func(t *testing.T) { + // Wd prefix mean test relative to local directory - + // mock change local dir by setting the fpAbs variable (see doc.go) to match + // testdata/integ/wd when we call it on ".". + if strings.HasPrefix(tc.args[0], "Wd") { + fpAbs = func(s string) (string, error) { return filepath.Clean(filepath.Join(path("wd"), s)), nil } + defer func() { fpAbs = filepath.Abs }() + } + result, err := ResolveDocumentable([]string{path("")}, tc.args, tc.unexp) + // we use stripFset because d.pkgData.fset contains sync/atomic values, + // which in turn makes reflect.DeepEqual compare the two sync.Atomic values. 
+ assert.Equal(t, stripFset(tc.expect), stripFset(result), "documentables should match") + if tc.errContains == "" { + assert.NoError(t, err) + } else { + assert.ErrorContains(t, err, tc.errContains) + } + }) + } +} + +func stripFset(p Documentable) Documentable { + if d, ok := p.(*documentable); ok && d.pkgData != nil { + d.pkgData.fset = nil + } + return p +} + +func TestDocument(t *testing.T) { + // the format itself can change if the design is to be changed, + // we want to make sure that given information is available when calling + // Document. + abspath, err := filepath.Abs("./testdata/integ/crypto/rand") + require.NoError(t, err) + dir := bfsDir{ + importPath: "crypto/rand", + dir: abspath, + } + + tt := []struct { + name string + d *documentable + opts *WriteDocumentationOptions + contains []string + }{ + {"base", &documentable{bfsDir: dir}, nil, []string{"func Crypto", "!Crypto symbol", "func NewRand", "!unexp", "type Flag", "!Name"}}, + {"func", &documentable{bfsDir: dir, symbol: "crypto"}, nil, []string{"Crypto symbol", "func Crypto", "!func NewRand", "!type Flag"}}, + {"funcWriter", &documentable{bfsDir: dir, symbol: "NewWriter"}, nil, []string{"func NewWriter() io.Writer", "!func Crypto"}}, + {"tp", &documentable{bfsDir: dir, symbol: "Rand"}, nil, []string{"type Rand", "comment1", "!func Crypto", "!unexp ", "!comment4", "Has unexported"}}, + {"tpField", &documentable{bfsDir: dir, symbol: "Rand", accessible: "Value"}, nil, []string{"type Rand", "!comment1", "comment2", "!func Crypto", "!unexp", "elided"}}, + { + "tpUnexp", + &documentable{bfsDir: dir, symbol: "Rand"}, + &WriteDocumentationOptions{Unexported: true}, + []string{"type Rand", "comment1", "!func Crypto", "unexp ", "comment4", "!Has unexported"}, + }, + { + "symUnexp", + &documentable{bfsDir: dir, symbol: "unexp"}, + &WriteDocumentationOptions{Unexported: true}, + []string{"var unexp", "!type Rand", "!comment1", "!comment4", "!func Crypto", "!Has unexported"}, + }, + { + "fieldUnexp", + 
&documentable{bfsDir: dir, symbol: "Rand", accessible: "unexp"}, + &WriteDocumentationOptions{Unexported: true}, + []string{"type Rand", "!comment1", "comment4", "!func Crypto", "elided", "!Has unexported"}, + }, + } + + buf := &bytes.Buffer{} + for _, tc := range tt { + tc := tc + t.Run(tc.name, func(t *testing.T) { + buf.Reset() + err := tc.d.WriteDocumentation(buf, tc.opts) + require.NoError(t, err) + s := buf.String() + for _, c := range tc.contains { + if c[0] == '!' { + assert.NotContains(t, s, c[1:]) + } else { + assert.Contains(t, s, c) + } + } + }) + } +} + +func Test_parseArgParts(t *testing.T) { + tt := []struct { + name string + args []string + exp *docArgs + }{ + {"noArgs", []string{}, &docArgs{pkg: "."}}, + + {"oneAmbiguous", []string{"ambiguous"}, &docArgs{pkg: "ambiguous", pkgAmbiguous: true}}, + {"onePath", []string{"pkg/path"}, &docArgs{pkg: "pkg/path"}}, + {"oneSpecial", []string{".."}, &docArgs{pkg: ".."}}, + {"oneSpecial2", []string{"../../../.."}, &docArgs{pkg: "../../../.."}}, + {"oneSpecial3", []string{"../upper/.."}, &docArgs{pkg: "../upper/.."}}, + {"oneSpecial4", []string{"."}, &docArgs{pkg: "."}}, + + {"twoPkgSym", []string{"pkg.sym"}, &docArgs{pkg: "pkg", sym: "sym", pkgAmbiguous: true}}, + {"twoPkgPathSym", []string{"path/pkg.sym"}, &docArgs{pkg: "path/pkg", sym: "sym"}}, + {"twoPkgUpperSym", []string{"../pkg.sym"}, &docArgs{pkg: "../pkg", sym: "sym"}}, + {"twoPkgExportedSym", []string{"Writer.Write"}, &docArgs{pkg: ".", sym: "Writer", acc: "Write"}}, + {"twoPkgCapitalPathSym", []string{"Path/Capitalised.Sym"}, &docArgs{pkg: "Path/Capitalised", sym: "Sym"}}, + + {"threePkgSymAcc", []string{"pkg.sym.acc"}, &docArgs{pkg: "pkg", sym: "sym", acc: "acc"}}, + {"threePathPkgSymAcc", []string{"./pkg.sym.acc"}, &docArgs{pkg: "./pkg", sym: "sym", acc: "acc"}}, + {"threePathPkgSymAcc2", []string{"../pkg.sym.acc"}, &docArgs{pkg: "../pkg", sym: "sym", acc: "acc"}}, + {"threePathPkgSymAcc3", []string{"path/to/pkg.sym.acc"}, &docArgs{pkg: 
"path/to/pkg", sym: "sym", acc: "acc"}}, + {"threePathPkgSymAcc4", []string{"path/../to/pkg.sym.acc"}, &docArgs{pkg: "path/../to/pkg", sym: "sym", acc: "acc"}}, + + // the logic on the split is pretty unambiguously that the first argument + // is the path, so we can afford to be less thorough on that regard. + {"splitTwo", []string{"io", "Writer"}, &docArgs{pkg: "io", sym: "Writer"}}, + {"splitThree", []string{"io", "Writer.Write"}, &docArgs{pkg: "io", sym: "Writer", acc: "Write"}}, + + {"errTooManyDots", []string{"io.Writer.Write.Impossible"}, nil}, + {"errTooManyDotsSplit", []string{"io", "Writer.Write.Impossible"}, nil}, + {"errTooManyArgs", []string{"io", "Writer", "Write"}, nil}, + } + for _, tc := range tt { + tc := tc + t.Run(tc.name, func(t *testing.T) { + p, ok := parseArgs(tc.args) + if ok { + _ = assert.NotNil(t, tc.exp, "parseArgs is successful when should have failed") && + assert.Equal(t, *tc.exp, p) + } else { + assert.Nil(t, tc.exp, "parseArgs is unsuccessful") + } + }) + } +} diff --git a/gnovm/pkg/doc/pkg.go b/gnovm/pkg/doc/pkg.go new file mode 100644 index 00000000000..71e1a50f299 --- /dev/null +++ b/gnovm/pkg/doc/pkg.go @@ -0,0 +1,229 @@ +package doc + +import ( + "fmt" + "go/ast" + "go/doc" + "go/parser" + "go/token" + "os" + "path/filepath" + "strings" +) + +type pkgData struct { + name string + dir bfsDir + fset *token.FileSet + files []*ast.File + testFiles []*ast.File + symbols []symbolData +} + +const ( + symbolDataValue byte = iota + symbolDataType + symbolDataFunc + symbolDataMethod + symbolDataStructField + symbolDataInterfaceMethod +) + +type symbolData struct { + symbol string + accessible string + typ byte +} + +func newPkgData(dir bfsDir, unexported bool) (*pkgData, error) { + files, err := os.ReadDir(dir.dir) + if err != nil { + return nil, fmt.Errorf("commands/doc: open %q: %w", dir.dir, err) + } + pkg := &pkgData{ + dir: dir, + fset: token.NewFileSet(), + } + for _, file := range files { + n := file.Name() + // Ignore files with 
prefix . or _ like go tools do. + // Ignore _filetest.gno, but not _test.gno, as we use those to compute + // examples. + if file.IsDir() || + !strings.HasSuffix(n, ".gno") || + strings.HasPrefix(n, ".") || + strings.HasPrefix(n, "_") || + strings.HasSuffix(n, "_filetest.gno") { + continue + } + fullPath := filepath.Join(dir.dir, n) + err := pkg.parseFile(fullPath, unexported) + if err != nil { + return nil, fmt.Errorf("commands/doc: parse file %q: %w", fullPath, err) + } + } + + if len(pkg.files) == 0 { + return nil, fmt.Errorf("commands/doc: no valid gno files in %q", dir.dir) + } + pkgName := pkg.files[0].Name.Name + for _, file := range pkg.files[1:] { + if file.Name.Name != pkgName { + return nil, fmt.Errorf("commands/doc: multiple packages (%q / %q) in dir %q", pkgName, file.Name.Name, dir.dir) + } + } + pkg.name = pkgName + + return pkg, nil +} + +func (pkg *pkgData) parseFile(fileName string, unexported bool) error { + f, err := os.Open(fileName) + if err != nil { + return err + } + defer f.Close() + astf, err := parser.ParseFile(pkg.fset, filepath.Base(fileName), f, parser.ParseComments) + if err != nil { + return err + } + if strings.HasSuffix(fileName, "_test.gno") { + // add test files separately - we should not add their symbols to the package. 
+ pkg.testFiles = append(pkg.testFiles, astf) + return nil + } + pkg.files = append(pkg.files, astf) + + // add symbols + for _, decl := range astf.Decls { + switch x := decl.(type) { + case *ast.FuncDecl: + // prepend receiver if this is a method + sd := symbolData{ + symbol: x.Name.Name, + typ: symbolDataFunc, + } + if x.Recv != nil { + sd.symbol, sd.accessible = typeExprString(x.Recv.List[0].Type), sd.symbol + if !unexported && !token.IsExported(sd.symbol) { + continue + } + sd.typ = symbolDataMethod + } + pkg.symbols = append(pkg.symbols, sd) + case *ast.GenDecl: + for _, spec := range x.Specs { + pkg.appendSpec(spec, unexported) + } + } + } + return nil +} + +func (pkg *pkgData) appendSpec(spec ast.Spec, unexported bool) { + switch s := spec.(type) { + case *ast.TypeSpec: + if !unexported && !token.IsExported(s.Name.Name) { + return + } + pkg.symbols = append(pkg.symbols, symbolData{symbol: s.Name.Name, typ: symbolDataType}) + switch st := s.Type.(type) { + case *ast.StructType: + pkg.appendFieldList(s.Name.Name, st.Fields, unexported, symbolDataStructField) + case *ast.InterfaceType: + pkg.appendFieldList(s.Name.Name, st.Methods, unexported, symbolDataInterfaceMethod) + } + case *ast.ValueSpec: + for _, name := range s.Names { + if !unexported && !token.IsExported(name.Name) { + continue + } + pkg.symbols = append(pkg.symbols, symbolData{symbol: name.Name, typ: symbolDataValue}) + } + } +} + +func (pkg *pkgData) appendFieldList(tName string, fl *ast.FieldList, unexported bool, typ byte) { + if fl == nil { + return + } + for _, field := range fl.List { + if field.Names == nil { + if typ == symbolDataInterfaceMethod { + continue + } + embName := typeExprString(field.Type) + if !unexported && !token.IsExported(embName) { + continue + } + // embedded struct + pkg.symbols = append(pkg.symbols, symbolData{symbol: tName, accessible: embName, typ: typ}) + continue + } + for _, name := range field.Names { + if !unexported && !token.IsExported(name.Name) { + continue + 
} + pkg.symbols = append(pkg.symbols, symbolData{symbol: tName, accessible: name.Name, typ: typ}) + } + } +} + +func typeExprString(expr ast.Expr) string { + if expr == nil { + return "" + } + + switch t := expr.(type) { + case *ast.Ident: + return t.Name + case *ast.StarExpr: + return typeExprString(t.X) + } + return "" +} + +func (pkg *pkgData) docPackage(opts *WriteDocumentationOptions) (*ast.Package, *doc.Package, error) { + // largely taken from go/doc.NewFromFiles source + + // Collect .gno files in a map for ast.NewPackage. + fileMap := make(map[string]*ast.File) + for i, file := range pkg.files { + f := pkg.fset.File(file.Pos()) + if f == nil { + return nil, nil, fmt.Errorf("commands/doc: file pkg.files[%d] is not found in the provided file set", i) + } + fileMap[f.Name()] = file + } + + // from cmd/doc/pkg.go: + // go/doc does not include typed constants in the constants + // list, which is what we want. For instance, time.Sunday is of type + // time.Weekday, so it is defined in the type but not in the + // Consts list for the package. This prevents + // go doc time.Sunday + // from finding the symbol. This is why we always have AllDecls. + mode := doc.AllDecls + if opts.Source { + mode |= doc.PreserveAST + } + + // Compute package documentation. + // Assign to blank to ignore errors that can happen due to unresolved identifiers. 
+ astpkg, _ := ast.NewPackage(pkg.fset, fileMap, simpleImporter, nil) + p := doc.New(astpkg, pkg.dir.importPath, mode) + // TODO: classifyExamples(p, Examples(testGoFiles...)) + + return astpkg, p, nil +} + +func simpleImporter(imports map[string]*ast.Object, path string) (*ast.Object, error) { + pkg := imports[path] + if pkg == nil { + // note that strings.LastIndex returns -1 if there is no "/" + pkg = ast.NewObj(ast.Pkg, path[strings.LastIndex(path, "/")+1:]) + pkg.Data = ast.NewScope(nil) // required by ast.NewPackage for dot-import + imports[path] = pkg + } + return pkg, nil +} diff --git a/gnovm/pkg/doc/print.go b/gnovm/pkg/doc/print.go new file mode 100644 index 00000000000..7ac1742c62f --- /dev/null +++ b/gnovm/pkg/doc/print.go @@ -0,0 +1,984 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Copied and modified from Go source: cmd/doc/pkg.go +// Modifications done include: +// - Removing code for supporting documenting commands +// - Removing code for supporting import commands + +package doc + +import ( + "bufio" + "bytes" + "errors" + "fmt" + "go/ast" + "go/doc" + "go/format" + "go/printer" + "go/token" + "io" + "log" + "strings" + "unicode" + "unicode/utf8" +) + +const ( + punchedCardWidth = 80 + indent = " " +) + +type pkgPrinter struct { + name string // Package name, json for encoding/json. + pkg *ast.Package // Parsed package. + file *ast.File // Merged from all files in the package + doc *doc.Package + typedValue map[*doc.Value]bool // Consts and vars related to types. + constructor map[*doc.Func]bool // Constructors. + fs *token.FileSet // Needed for printing. + buf pkgBuffer + opt *WriteDocumentationOptions + importPath string + + // this is set when an error should be returned up the call chain. + // it is set together with a panic(errFatal), so it can be checked easily + // when calling recover. + err error +} + +func (pkg *pkgPrinter) isExported(name string) bool { + // cmd/doc uses a global here, so we change this to be a method. 
+ return pkg.opt.Unexported || token.IsExported(name) +} + +func (pkg *pkgPrinter) ToText(w io.Writer, text, prefix, codePrefix string) { + d := pkg.doc.Parser().Parse(text) + pr := pkg.doc.Printer() + pr.TextPrefix = prefix + pr.TextCodePrefix = codePrefix + w.Write(pr.Text(d)) +} + +// pkgBuffer is a wrapper for bytes.Buffer that prints a package clause the +// first time Write is called. +type pkgBuffer struct { + pkg *pkgPrinter + printed bool // Prevent repeated package clauses. + bytes.Buffer +} + +func (pb *pkgBuffer) Write(p []byte) (int, error) { + pb.packageClause() + return pb.Buffer.Write(p) +} + +func (pb *pkgBuffer) packageClause() { + if !pb.printed { + pb.printed = true + pb.pkg.packageClause() + } +} + +var errFatal = errors.New("pkg/doc: pkgPrinter.Fatalf called") + +// in cmd/go, pkg.Fatalf is like log.Fatalf, but panics so it can be recovered in the +// main do function, so it doesn't cause an exit. Allows testing to work +// without running a subprocess. +// For our purposes, we store the error in .err - the caller knows about this and will check it. +func (pkg *pkgPrinter) Fatalf(format string, args ...any) { + pkg.err = fmt.Errorf(format, args...) + panic(errFatal) +} + +func (pkg *pkgPrinter) Printf(format string, args ...any) { + fmt.Fprintf(&pkg.buf, format, args...) +} + +func (pkg *pkgPrinter) flush() error { + _, err := pkg.opt.w.Write(pkg.buf.Bytes()) + if err != nil { + return err + } + pkg.buf.Reset() // Not needed, but it's a flush. + return nil +} + +var newlineBytes = []byte("\n\n") // We never ask for more than 2. + +// newlines guarantees there are n newlines at the end of the buffer. +func (pkg *pkgPrinter) newlines(n int) { + for !bytes.HasSuffix(pkg.buf.Bytes(), newlineBytes[:n]) { + pkg.buf.WriteRune('\n') + } +} + +// emit prints the node. If pkg.opt.Source is true, it ignores the provided comment, +// assuming the comment is in the node itself. 
Otherwise, the go/doc package +// clears the stuff we don't want to print anyway. It's a bit of a magic trick. +func (pkg *pkgPrinter) emit(comment string, node ast.Node) { + if node != nil { + var arg any = node + if pkg.opt.Source { + // Need an extra little dance to get internal comments to appear. + arg = &printer.CommentedNode{ + Node: node, + Comments: pkg.file.Comments, + } + } + err := format.Node(&pkg.buf, pkg.fs, arg) + if err != nil { + pkg.Fatalf("%v", err) + } + if comment != "" && !pkg.opt.Source { + pkg.newlines(1) + pkg.ToText(&pkg.buf, comment, indent, indent+indent) + pkg.newlines(2) // Blank line after comment to separate from next item. + } else { + pkg.newlines(1) + } + } +} + +// oneLineNode returns a one-line summary of the given input node. +func (pkg *pkgPrinter) oneLineNode(node ast.Node) string { + const maxDepth = 10 + return pkg.oneLineNodeDepth(node, maxDepth) +} + +// oneLineNodeDepth returns a one-line summary of the given input node. +// The depth specifies the maximum depth when traversing the AST. +func (pkg *pkgPrinter) oneLineNodeDepth(node ast.Node, depth int) string { + const dotDotDot = "..." + if depth == 0 { + return dotDotDot + } + depth-- + + switch n := node.(type) { + case nil: + return "" + + case *ast.GenDecl: + // Formats const and var declarations. + trailer := "" + if len(n.Specs) > 1 { + trailer = " " + dotDotDot + } + + // Find the first relevant spec. + typ := "" + for i, spec := range n.Specs { + valueSpec := spec.(*ast.ValueSpec) // Must succeed; we can't mix types in one GenDecl. + + // The type name may carry over from a previous specification in the + // case of constants and iota. 
+ if valueSpec.Type != nil { + typ = fmt.Sprintf(" %s", pkg.oneLineNodeDepth(valueSpec.Type, depth)) + } else if len(valueSpec.Values) > 0 { + typ = "" + } + + if !pkg.isExported(valueSpec.Names[0].Name) { + continue + } + val := "" + if i < len(valueSpec.Values) && valueSpec.Values[i] != nil { + val = fmt.Sprintf(" = %s", pkg.oneLineNodeDepth(valueSpec.Values[i], depth)) + } + return fmt.Sprintf("%s %s%s%s%s", n.Tok, valueSpec.Names[0], typ, val, trailer) + } + return "" + + case *ast.FuncDecl: + // Formats func declarations. + name := n.Name.Name + recv := pkg.oneLineNodeDepth(n.Recv, depth) + if len(recv) > 0 { + recv = "(" + recv + ") " + } + fnc := pkg.oneLineNodeDepth(n.Type, depth) + fnc = strings.TrimPrefix(fnc, "func") + return fmt.Sprintf("func %s%s%s", recv, name, fnc) + + case *ast.TypeSpec: + sep := " " + if n.Assign.IsValid() { + sep = " = " + } + tparams := pkg.formatTypeParams(n.TypeParams, depth) + return fmt.Sprintf("type %s%s%s%s", n.Name.Name, tparams, sep, pkg.oneLineNodeDepth(n.Type, depth)) + + case *ast.FuncType: + var params []string + if n.Params != nil { + for _, field := range n.Params.List { + params = append(params, pkg.oneLineField(field, depth)) + } + } + needParens := false + var results []string + if n.Results != nil { + needParens = needParens || len(n.Results.List) > 1 + for _, field := range n.Results.List { + needParens = needParens || len(field.Names) > 0 + results = append(results, pkg.oneLineField(field, depth)) + } + } + + tparam := pkg.formatTypeParams(n.TypeParams, depth) + param := joinStrings(params) + if len(results) == 0 { + return fmt.Sprintf("func%s(%s)", tparam, param) + } + result := joinStrings(results) + if !needParens { + return fmt.Sprintf("func%s(%s) %s", tparam, param, result) + } + return fmt.Sprintf("func%s(%s) (%s)", tparam, param, result) + + case *ast.StructType: + if n.Fields == nil || len(n.Fields.List) == 0 { + return "struct{}" + } + return "struct{ ... 
}" + + case *ast.InterfaceType: + if n.Methods == nil || len(n.Methods.List) == 0 { + return "interface{}" + } + return "interface{ ... }" + + case *ast.FieldList: + if n == nil || len(n.List) == 0 { + return "" + } + if len(n.List) == 1 { + return pkg.oneLineField(n.List[0], depth) + } + return dotDotDot + + case *ast.FuncLit: + return pkg.oneLineNodeDepth(n.Type, depth) + " { ... }" + + case *ast.CompositeLit: + typ := pkg.oneLineNodeDepth(n.Type, depth) + if len(n.Elts) == 0 { + return fmt.Sprintf("%s{}", typ) + } + return fmt.Sprintf("%s{ %s }", typ, dotDotDot) + + case *ast.ArrayType: + length := pkg.oneLineNodeDepth(n.Len, depth) + element := pkg.oneLineNodeDepth(n.Elt, depth) + return fmt.Sprintf("[%s]%s", length, element) + + case *ast.MapType: + key := pkg.oneLineNodeDepth(n.Key, depth) + value := pkg.oneLineNodeDepth(n.Value, depth) + return fmt.Sprintf("map[%s]%s", key, value) + + case *ast.CallExpr: + fnc := pkg.oneLineNodeDepth(n.Fun, depth) + var args []string + for _, arg := range n.Args { + args = append(args, pkg.oneLineNodeDepth(arg, depth)) + } + return fmt.Sprintf("%s(%s)", fnc, joinStrings(args)) + + case *ast.UnaryExpr: + return fmt.Sprintf("%s%s", n.Op, pkg.oneLineNodeDepth(n.X, depth)) + + case *ast.Ident: + return n.Name + + default: + // As a fallback, use default formatter for all unknown node types. + buf := new(strings.Builder) + format.Node(buf, pkg.fs, node) + s := buf.String() + if strings.Contains(s, "\n") { + return dotDotDot + } + return s + } +} + +func (pkg *pkgPrinter) formatTypeParams(list *ast.FieldList, depth int) string { + if list.NumFields() == 0 { + return "" + } + tparams := make([]string, 0, len(list.List)) + for _, field := range list.List { + tparams = append(tparams, pkg.oneLineField(field, depth)) + } + return "[" + joinStrings(tparams) + "]" +} + +// oneLineField returns a one-line summary of the field. 
+func (pkg *pkgPrinter) oneLineField(field *ast.Field, depth int) string { + names := make([]string, 0, len(field.Names)) + for _, name := range field.Names { + names = append(names, name.Name) + } + if len(names) == 0 { + return pkg.oneLineNodeDepth(field.Type, depth) + } + return joinStrings(names) + " " + pkg.oneLineNodeDepth(field.Type, depth) +} + +// joinStrings formats the input as a comma-separated list, +// but truncates the list at some reasonable length if necessary. +func joinStrings(ss []string) string { + var n int + for i, s := range ss { + n += len(s) + len(", ") + if n > punchedCardWidth { + ss = append(ss[:i:i], "...") + break + } + } + return strings.Join(ss, ", ") +} + +// allDoc prints all the docs for the package. +func (pkg *pkgPrinter) allDoc() { + pkg.Printf("") // Trigger the package clause; we know the package exists. + pkg.ToText(&pkg.buf, pkg.doc.Doc, "", indent) + pkg.newlines(1) + + printed := make(map[*ast.GenDecl]bool) + + hdr := "" + printHdr := func(s string) { + if hdr != s { + pkg.Printf("\n%s\n\n", s) + hdr = s + } + } + + // Constants. + for _, value := range pkg.doc.Consts { + // Constants and variables come in groups, and valueDoc prints + // all the items in the group. We only need to find one exported symbol. + for _, name := range value.Names { + if pkg.isExported(name) && !pkg.typedValue[value] { + printHdr("CONSTANTS") + pkg.valueDoc(value, printed) + break + } + } + } + + // Variables. + for _, value := range pkg.doc.Vars { + // Constants and variables come in groups, and valueDoc prints + // all the items in the group. We only need to find one exported symbol. + for _, name := range value.Names { + if pkg.isExported(name) && !pkg.typedValue[value] { + printHdr("VARIABLES") + pkg.valueDoc(value, printed) + break + } + } + } + + // Functions. + for _, fun := range pkg.doc.Funcs { + if pkg.isExported(fun.Name) && !pkg.constructor[fun] { + printHdr("FUNCTIONS") + pkg.emit(fun.Doc, fun.Decl) + } + } + + // Types. 
+ for _, typ := range pkg.doc.Types { + if pkg.isExported(typ.Name) { + printHdr("TYPES") + pkg.typeDoc(typ) + } + } +} + +// packageDoc prints the docs for the package (package doc plus one-liners of the rest). +func (pkg *pkgPrinter) packageDoc() { + pkg.Printf("") // Trigger the package clause; we know the package exists. + if !pkg.opt.Short { + pkg.ToText(&pkg.buf, pkg.doc.Doc, "", indent) + pkg.newlines(1) + } + + if !pkg.opt.Short { + pkg.newlines(2) // Guarantee blank line before the components. + } + + pkg.valueSummary(pkg.doc.Consts, false) + pkg.valueSummary(pkg.doc.Vars, false) + pkg.funcSummary(pkg.doc.Funcs, false) + pkg.typeSummary() + if !pkg.opt.Short { + pkg.bugs() + } +} + +// packageClause prints the package clause. +func (pkg *pkgPrinter) packageClause() { + if pkg.opt.Short { + return + } + + // If we're using modules, the import path derived from module code locations wins. + // If we did a file system scan, we knew the import path when we found the directory. + // But if we started with a directory name, we never knew the import path. + // Either way, we don't know it now, and it's cheap to (re)compute it. + /* TODO: add when supporting gnodev doc on local directories + if usingModules { + for _, root := range codeRoots() { + if pkg.build.Dir == root.dir { + importPath = root.importPath + break + } + if strings.HasPrefix(pkg.build.Dir, root.dir+string(filepath.Separator)) { + suffix := filepath.ToSlash(pkg.build.Dir[len(root.dir)+1:]) + if root.importPath == "" { + importPath = suffix + } else { + importPath = root.importPath + "/" + suffix + } + break + } + } + } + */ + + pkg.Printf("package %s // import %q\n\n", pkg.name, pkg.importPath) + /* TODO + if !usingModules && importPath != pkg.build.ImportPath { + pkg.Printf("WARNING: package source is installed in %q\n", pkg.build.ImportPath) + } */ +} + +// valueSummary prints a one-line summary for each set of values and constants. 
+// If all the types in a constant or variable declaration belong to the same +// type they can be printed by typeSummary, and so can be suppressed here. +func (pkg *pkgPrinter) valueSummary(values []*doc.Value, showGrouped bool) { + var isGrouped map[*doc.Value]bool + if !showGrouped { + isGrouped = make(map[*doc.Value]bool) + for _, typ := range pkg.doc.Types { + if !pkg.isExported(typ.Name) { + continue + } + for _, c := range typ.Consts { + isGrouped[c] = true + } + for _, v := range typ.Vars { + isGrouped[v] = true + } + } + } + + for _, value := range values { + if !isGrouped[value] { + if decl := pkg.oneLineNode(value.Decl); decl != "" { + pkg.Printf("%s\n", decl) + } + } + } +} + +// funcSummary prints a one-line summary for each function. Constructors +// are printed by typeSummary, below, and so can be suppressed here. +func (pkg *pkgPrinter) funcSummary(funcs []*doc.Func, showConstructors bool) { + for _, fun := range funcs { + // Exported functions only. The go/doc package does not include methods here. + if pkg.isExported(fun.Name) { + if showConstructors || !pkg.constructor[fun] { + pkg.Printf("%s\n", pkg.oneLineNode(fun.Decl)) + } + } + } +} + +// typeSummary prints a one-line summary for each type, followed by its constructors. +func (pkg *pkgPrinter) typeSummary() { + for _, typ := range pkg.doc.Types { + for _, spec := range typ.Decl.Specs { + typeSpec := spec.(*ast.TypeSpec) // Must succeed. + if pkg.isExported(typeSpec.Name.Name) { + pkg.Printf("%s\n", pkg.oneLineNode(typeSpec)) + // Now print the consts, vars, and constructors. 
+ for _, c := range typ.Consts { + if decl := pkg.oneLineNode(c.Decl); decl != "" { + pkg.Printf(indent+"%s\n", decl) + } + } + for _, v := range typ.Vars { + if decl := pkg.oneLineNode(v.Decl); decl != "" { + pkg.Printf(indent+"%s\n", decl) + } + } + for _, constructor := range typ.Funcs { + if pkg.isExported(constructor.Name) { + pkg.Printf(indent+"%s\n", pkg.oneLineNode(constructor.Decl)) + } + } + } + } + } +} + +// bugs prints the BUGS information for the package. +// TODO: Provide access to TODOs and NOTEs as well (very noisy so off by default)? +func (pkg *pkgPrinter) bugs() { + if pkg.doc.Notes["BUG"] == nil { + return + } + pkg.Printf("\n") + for _, note := range pkg.doc.Notes["BUG"] { + pkg.Printf("%s: %v\n", "BUG", note.Body) + } +} + +// findValues finds the doc.Values that describe the symbol. +func (pkg *pkgPrinter) findValues(symbol string, docValues []*doc.Value) (values []*doc.Value) { + for _, value := range docValues { + for _, name := range value.Names { + if pkg.match(symbol, name) { + values = append(values, value) + } + } + } + return +} + +// findFuncs finds the doc.Funcs that describes the symbol. +func (pkg *pkgPrinter) findFuncs(symbol string) (funcs []*doc.Func) { + for _, fun := range pkg.doc.Funcs { + if pkg.match(symbol, fun.Name) { + funcs = append(funcs, fun) + } + } + return +} + +// findTypes finds the doc.Types that describes the symbol. +// If symbol is empty, it finds all exported types. +func (pkg *pkgPrinter) findTypes(symbol string) (types []*doc.Type) { + for _, typ := range pkg.doc.Types { + if symbol == "" && pkg.isExported(typ.Name) || pkg.match(symbol, typ.Name) { + types = append(types, typ) + } + } + return +} + +// findTypeSpec returns the ast.TypeSpec within the declaration that defines the symbol. +// The name must match exactly. +func (pkg *pkgPrinter) findTypeSpec(decl *ast.GenDecl, symbol string) *ast.TypeSpec { + for _, spec := range decl.Specs { + typeSpec := spec.(*ast.TypeSpec) // Must succeed. 
+ if symbol == typeSpec.Name.Name { + return typeSpec + } + } + return nil +} + +// symbolDoc prints the docs for symbol. There may be multiple matches. +// If symbol matches a type, output includes its methods factories and associated constants. +// If there is no top-level symbol, symbolDoc looks for methods that match. +func (pkg *pkgPrinter) symbolDoc(symbol string) bool { + found := false + // Functions. + for _, fun := range pkg.findFuncs(symbol) { + // Symbol is a function. + decl := fun.Decl + pkg.emit(fun.Doc, decl) + found = true + } + // Constants and variables behave the same. + values := pkg.findValues(symbol, pkg.doc.Consts) + values = append(values, pkg.findValues(symbol, pkg.doc.Vars)...) + // A declaration like + // const ( c = 1; C = 2 ) + // could be printed twice if the -u flag is set, as it matches twice. + // So we remember which declarations we've printed to avoid duplication. + printed := make(map[*ast.GenDecl]bool) + for _, value := range values { + pkg.valueDoc(value, printed) + found = true + } + // Types. + for _, typ := range pkg.findTypes(symbol) { + pkg.typeDoc(typ) + found = true + } + if !found { + // See if there are methods. + if !pkg.printMethodDoc("", symbol) { + return false + } + } + return true +} + +// valueDoc prints the docs for a constant or variable. +func (pkg *pkgPrinter) valueDoc(value *doc.Value, printed map[*ast.GenDecl]bool) { + if printed[value.Decl] { + return + } + // Print each spec only if there is at least one exported symbol in it. + // (See issue 11008.) + // TODO: Should we elide unexported symbols from a single spec? + // It's an unlikely scenario, probably not worth the trouble. + // TODO: Would be nice if go/doc did this for us. + specs := make([]ast.Spec, 0, len(value.Decl.Specs)) + var typ ast.Expr + for _, spec := range value.Decl.Specs { + vspec := spec.(*ast.ValueSpec) + + // The type name may carry over from a previous specification in the + // case of constants and iota. 
 + if vspec.Type != nil { + typ = vspec.Type + } + + for _, ident := range vspec.Names { + if pkg.opt.Source || pkg.isExported(ident.Name) { + if vspec.Type == nil && vspec.Values == nil && typ != nil { + // This is a standalone identifier, as in the case of iota usage. + // Thus, assume the type comes from the previous type. + vspec.Type = &ast.Ident{ + Name: pkg.oneLineNode(typ), + NamePos: vspec.End() - 1, + } + } + + specs = append(specs, vspec) + typ = nil // Only inject type on first exported identifier + break + } + } + } + if len(specs) == 0 { + return + } + value.Decl.Specs = specs + pkg.emit(value.Doc, value.Decl) + printed[value.Decl] = true +} + +// typeDoc prints the docs for a type, including constructors and other items +// related to it. +func (pkg *pkgPrinter) typeDoc(typ *doc.Type) { + decl := typ.Decl + spec := pkg.findTypeSpec(decl, typ.Name) + pkg.trimUnexportedElems(spec) + // If there are multiple types defined, reduce to just this one. + if len(decl.Specs) > 1 { + decl.Specs = []ast.Spec{spec} + } + pkg.emit(typ.Doc, decl) + pkg.newlines(2) + // Show associated methods, constants, etc. + if pkg.opt.ShowAll { + printed := make(map[*ast.GenDecl]bool) + // We can use append here to print consts, then vars. Ditto for funcs and methods. + values := typ.Consts + values = append(values, typ.Vars...) + for _, value := range values { + for _, name := range value.Names { + if pkg.isExported(name) { + pkg.valueDoc(value, printed) + break + } + } + } + funcs := typ.Funcs + funcs = append(funcs, typ.Methods...) 
+ for _, fun := range funcs { + if pkg.isExported(fun.Name) { + pkg.emit(fun.Doc, fun.Decl) + if fun.Doc == "" { + pkg.newlines(2) + } + } + } + } else { + pkg.valueSummary(typ.Consts, true) + pkg.valueSummary(typ.Vars, true) + pkg.funcSummary(typ.Funcs, true) + pkg.funcSummary(typ.Methods, true) + } +} + +// trimUnexportedElems modifies spec in place to elide unexported fields from +// structs and methods from interfaces (unless the unexported flag is set or we +// are asked to show the original source). +func (pkg *pkgPrinter) trimUnexportedElems(spec *ast.TypeSpec) { + if pkg.opt.Unexported || pkg.opt.Source { + return + } + switch typ := spec.Type.(type) { + case *ast.StructType: + typ.Fields = pkg.trimUnexportedFields(typ.Fields, false) + case *ast.InterfaceType: + typ.Methods = pkg.trimUnexportedFields(typ.Methods, true) + } +} + +// trimUnexportedFields returns the field list trimmed of unexported fields. +func (pkg *pkgPrinter) trimUnexportedFields(fields *ast.FieldList, isInterface bool) *ast.FieldList { + what := "methods" + if !isInterface { + what = "fields" + } + + trimmed := false + list := make([]*ast.Field, 0, len(fields.List)) + for _, field := range fields.List { + names := field.Names + if len(names) == 0 { + // Embedded type. Use the name of the type. It must be of the form ident or + // pkg.ident (for structs and interfaces), or *ident or *pkg.ident (structs only). + // Or a type embedded in a constraint. + // Nothing else is allowed. + ty := field.Type + if se, ok := field.Type.(*ast.StarExpr); !isInterface && ok { + // The form *ident or *pkg.ident is only valid on + // embedded types in structs. + ty = se.X + } + constraint := false + switch ident := ty.(type) { + case *ast.Ident: + if isInterface && ident.Name == "error" && ident.Obj == nil { + // For documentation purposes, we consider the builtin error + // type special when embedded in an interface, such that it + // always gets shown publicly. 
+ list = append(list, field) + continue + } + names = []*ast.Ident{ident} + case *ast.SelectorExpr: + // An embedded type may refer to a type in another package. + names = []*ast.Ident{ident.Sel} + default: + // An approximation or union or type + // literal in an interface. + constraint = true + } + if names == nil && !constraint { + // Can only happen if AST is incorrect. Safe to continue with a nil list. + log.Print("warning: invalid program: unexpected type for embedded field") + } + } + // Trims if any is unexported. Good enough in practice. + ok := true + for _, name := range names { + if !pkg.isExported(name.Name) { + trimmed = true + ok = false + break + } + } + if ok { + list = append(list, field) + } + } + if !trimmed { + return fields + } + unexportedField := &ast.Field{ + Type: &ast.Ident{ + // Hack: printer will treat this as a field with a named type. + // Setting Name and NamePos to ("", fields.Closing-1) ensures that + // when Pos and End are called on this field, they return the + // position right before closing '}' character. + Name: "", + NamePos: fields.Closing - 1, + }, + Comment: &ast.CommentGroup{ + List: []*ast.Comment{{Text: fmt.Sprintf("// Has unexported %s.\n", what)}}, + }, + } + return &ast.FieldList{ + Opening: fields.Opening, + List: append(list, unexportedField), + Closing: fields.Closing, + } +} + +// printMethodDoc prints the docs for matches of symbol.method. +// If symbol is empty, it prints all methods for any concrete type +// that match the name. It reports whether it found any methods. 
+func (pkg *pkgPrinter) printMethodDoc(symbol, method string) bool { + types := pkg.findTypes(symbol) + if types == nil { + if symbol == "" { + return false + } + pkg.Fatalf("symbol %s is not a type in package %s installed in %q", symbol, pkg.name, pkg.importPath) + } + found := false + for _, typ := range types { + if len(typ.Methods) > 0 { + for _, meth := range typ.Methods { + if pkg.match(method, meth.Name) { + decl := meth.Decl + pkg.emit(meth.Doc, decl) + found = true + } + } + continue + } + if symbol == "" { + continue + } + // Type may be an interface. The go/doc package does not attach + // an interface's methods to the doc.Type. We need to dig around. + spec := pkg.findTypeSpec(typ.Decl, typ.Name) + inter, ok := spec.Type.(*ast.InterfaceType) + if !ok { + // Not an interface type. + continue + } + + // Collect and print only the methods that match. + var methods []*ast.Field + for _, iMethod := range inter.Methods.List { + // This is an interface, so there can be only one name. + // TODO: Anonymous methods (embedding) + if len(iMethod.Names) == 0 { + continue + } + name := iMethod.Names[0].Name + if pkg.match(method, name) { + methods = append(methods, iMethod) + found = true + } + } + if found { + pkg.Printf("type %s ", spec.Name) + inter.Methods.List, methods = methods, inter.Methods.List + err := format.Node(&pkg.buf, pkg.fs, inter) + if err != nil { + pkg.Fatalf("%v", err) + } + pkg.newlines(1) + // Restore the original methods. + inter.Methods.List = methods + } + } + return found +} + +// printFieldDoc prints the docs for matches of symbol.fieldName. +// It reports whether it found any field. +// Both symbol and fieldName must be non-empty or it returns false. 
+func (pkg *pkgPrinter) printFieldDoc(symbol, fieldName string) bool { + if symbol == "" || fieldName == "" { + return false + } + types := pkg.findTypes(symbol) + if types == nil { + pkg.Fatalf("symbol %s is not a type in package %s installed in %q", symbol, pkg.name, pkg.importPath) + } + found := false + numUnmatched := 0 + for _, typ := range types { + // Type must be a struct. + spec := pkg.findTypeSpec(typ.Decl, typ.Name) + structType, ok := spec.Type.(*ast.StructType) + if !ok { + // Not a struct type. + continue + } + for _, field := range structType.Fields.List { + // TODO: Anonymous fields. + for _, name := range field.Names { + if !pkg.match(fieldName, name.Name) { + numUnmatched++ + continue + } + if !found { + pkg.Printf("type %s struct {\n", typ.Name) + } + if field.Doc != nil { + // To present indented blocks in comments correctly, process the comment as + // a unit before adding the leading // to each line. + docBuf := new(bytes.Buffer) + pkg.ToText(docBuf, field.Doc.Text(), "", indent) + scanner := bufio.NewScanner(docBuf) + for scanner.Scan() { + fmt.Fprintf(&pkg.buf, "%s// %s\n", indent, scanner.Bytes()) + } + } + s := pkg.oneLineNode(field.Type) + lineComment := "" + if field.Comment != nil { + lineComment = fmt.Sprintf(" %s", field.Comment.List[0].Text) + } + pkg.Printf("%s%s %s%s\n", indent, name, s, lineComment) + found = true + } + } + } + if found { + if numUnmatched > 0 { + pkg.Printf("\n // ... other fields elided ...\n") + } + pkg.Printf("}\n") + } + return found +} + +// methodDoc prints the docs for matches of symbol.method. +func (pkg *pkgPrinter) methodDoc(symbol, method string) bool { + return pkg.printMethodDoc(symbol, method) +} + +// fieldDoc prints the docs for matches of symbol.field. 
+func (pkg *pkgPrinter) fieldDoc(symbol, field string) bool { + return pkg.printFieldDoc(symbol, field) +} + +func (pkg *pkgPrinter) match(user, program string) bool { + if !pkg.isExported(program) { + return false + } + return symbolMatch(user, program) +} + +// match reports whether the user's symbol matches the program's. +// A lower-case character in the user's string matches either case in the program's. +func symbolMatch(user, program string) bool { + /* TODO: might be useful to add for tooling. + if matchCase { + return user == program + } */ + for _, u := range user { + p, w := utf8.DecodeRuneInString(program) + program = program[w:] + if u == p { + continue + } + if unicode.IsLower(u) && simpleFold(u) == simpleFold(p) { + continue + } + return false + } + return program == "" +} + +// simpleFold returns the minimum rune equivalent to r +// under Unicode-defined simple case folding. +func simpleFold(r rune) rune { + for { + r1 := unicode.SimpleFold(r) + if r1 <= r { + return r1 // wrapped around, found min + } + r = r1 + } +} diff --git a/gnovm/pkg/doc/test_notapkg/.keep b/gnovm/pkg/doc/test_notapkg/.keep new file mode 100644 index 00000000000..e69de29bb2d diff --git a/gnovm/pkg/doc/testdata/dirs/crypto/crypto.gno b/gnovm/pkg/doc/testdata/dirs/crypto/crypto.gno new file mode 100644 index 00000000000..e69de29bb2d diff --git a/gnovm/pkg/doc/testdata/dirs/crypto/rand/rand.gno b/gnovm/pkg/doc/testdata/dirs/crypto/rand/rand.gno new file mode 100644 index 00000000000..e69de29bb2d diff --git a/gnovm/pkg/doc/testdata/dirs/crypto/testdata/rand/ignored.gno b/gnovm/pkg/doc/testdata/dirs/crypto/testdata/rand/ignored.gno new file mode 100644 index 00000000000..e69de29bb2d diff --git a/gnovm/pkg/doc/testdata/dirs/math/math.gno b/gnovm/pkg/doc/testdata/dirs/math/math.gno new file mode 100644 index 00000000000..e69de29bb2d diff --git a/gnovm/pkg/doc/testdata/dirs/math/rand/rand.gno b/gnovm/pkg/doc/testdata/dirs/math/rand/rand.gno new file mode 100644 index 
00000000000..e69de29bb2d diff --git a/gnovm/pkg/doc/testdata/dirs/rand/rand.gno b/gnovm/pkg/doc/testdata/dirs/rand/rand.gno new file mode 100644 index 00000000000..e69de29bb2d diff --git a/gnovm/pkg/doc/testdata/integ/crypto/rand/rand.gno b/gnovm/pkg/doc/testdata/integ/crypto/rand/rand.gno new file mode 100644 index 00000000000..f30d3557386 --- /dev/null +++ b/gnovm/pkg/doc/testdata/integ/crypto/rand/rand.gno @@ -0,0 +1,51 @@ +package rand + +import ( + "io" +) + +// Crypto symbol. +func Crypto() {} + +func NewWriter() io.Writer {} + +// A Rand is a test symbol for structs. +type Rand struct { + Name string // comment1 + Value int // comment2 + Attempts bool // comment3 + unexp chan int // comment4 +} + +type Rander interface { + Generate() +} + +type RandEmbedder struct { + A string + Rand +} + +// NewRand generates a new Rand. +func NewRand() *Rand { + return nil +} + +func (*Rand) Generate() { +} + +// Flag is tested for constant doc. +type Flag int + +// Common flag values. +const ( + FlagA Flag = 1 << iota + FlagB + FlagC +) + +var FlagVar Flag = 9999 + +var ExportedVar = true + +var unexp = 1 diff --git a/gnovm/pkg/doc/testdata/integ/rand/rand.gno b/gnovm/pkg/doc/testdata/integ/rand/rand.gno new file mode 100644 index 00000000000..7e8c41f0db7 --- /dev/null +++ b/gnovm/pkg/doc/testdata/integ/rand/rand.gno @@ -0,0 +1,5 @@ +package rand + +// Normal symbol. +func Normal() { +} diff --git a/gnovm/pkg/doc/testdata/integ/test_notapkg/a.gno b/gnovm/pkg/doc/testdata/integ/test_notapkg/a.gno new file mode 100644 index 00000000000..bd4a4e79f49 --- /dev/null +++ b/gnovm/pkg/doc/testdata/integ/test_notapkg/a.gno @@ -0,0 +1,3 @@ +package notapkg + +var I int diff --git a/gnovm/pkg/doc/testdata/integ/wd/wd.gno b/gnovm/pkg/doc/testdata/integ/wd/wd.gno new file mode 100644 index 00000000000..23fbddf63ea --- /dev/null +++ b/gnovm/pkg/doc/testdata/integ/wd/wd.gno @@ -0,0 +1,13 @@ +package wd + +// Used for testing symbols relative to local dir. 
+ +const WdConst = 1 + +var WdVar = 1 + +type WdType int + +func (WdType) WdMethod() {} + +func WdFunc() {}