diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml index aa6f0059..672c337c 100644 --- a/.github/workflows/create-release.yml +++ b/.github/workflows/create-release.yml @@ -125,8 +125,8 @@ jobs: release_name: Release ${{ github.ref }} body: | Changes in this Release - - **UPDATED** Updated longtail to v0.2.16 - fixes handling of UNC paths - - **FIX** Minor logging tweaks + - **ADDED** Added `--cache-target-index` option to downsync/unpack that automatically caches target folder state. Default on, turn off with `--no-cache-target-index` + - **UPDATED** Updated longtail to v0.2.18 draft: false prerelease: false - name: Download Linux artifacts diff --git a/commands/cmd_clonestore.go b/commands/cmd_clonestore.go index f86fdedf..388a2005 100644 --- a/commands/cmd_clonestore.go +++ b/commands/cmd_clonestore.go @@ -92,10 +92,10 @@ func downloadFromZip(targetPath string, sourceFileZipPath string) error { "sourceFileZipPath": sourceFileZipPath, }) if sourceFileZipPath == "" { - err := fmt.Errorf("Skipping, no zip file available for `%s`", sourceFileZipPath) + err := fmt.Errorf("skipping, no zip file available for `%s`", sourceFileZipPath) return errors.Wrap(err, fname) } - log.Infof("Falling back to reading ZIP source from `%s`", sourceFileZipPath) + log.Infof("falling back to reading ZIP source from `%s`", sourceFileZipPath) zipBytes, err := longtailutils.ReadFromURI(sourceFileZipPath) if err != nil { return errors.Wrap(err, fname) @@ -128,7 +128,7 @@ func downloadFromZip(targetPath string, sourceFileZipPath string) error { defer func() { if err := rc.Close(); err != nil { err = errors.Wrap(err, fname) - log.WithError(err).Errorf("Failed to close zip file") + log.WithError(err).Errorf("failed to close zip file") } }() @@ -137,7 +137,7 @@ func downloadFromZip(targetPath string, sourceFileZipPath string) error { // Check for ZipSlip (Directory traversal) if !strings.HasPrefix(path, filepath.Clean(targetPath)+string(os.PathSeparator)) { - err := fmt.Errorf("Illegal file path: `%s`", path) + err := fmt.Errorf("illegal file path: `%s`", path) return errors.Wrap(err, fname) } @@ -158,7 +158,7 @@ func downloadFromZip(targetPath string, sourceFileZipPath string) error { defer func() { if err := f.Close(); err != nil { err = errors.Wrap(err, fname) - log.WithError(err).Errorf("Failed to close target file") + log.WithError(err).Errorf("failed to close target file") } }() @@ -217,7 +217,7 @@ func updateCurrentVersionFromLongtail( localVersionIndex = cloneVersionIndex(targetPathVersionIndex) hash, err = hashRegistry.GetHashAPI(hashIdentifier) if err != nil { - err = errors.Wrap(err, fmt.Sprintf("Unsupported hash identifier `%d`", hashIdentifier)) + err = errors.Wrap(err, fmt.Sprintf("unsupported hash identifier `%d`", hashIdentifier)) return localVersionIndex, longtaillib.Longtail_HashAPI{}, errors.Wrap(err, fname) } } else { @@ -238,7 +238,7 @@ func updateCurrentVersionFromLongtail( localVersionIndex, hash, _, err = targetIndexReader.Get() if err != nil { - err := errors.Wrap(err, "Failed scanning target path") + err := errors.Wrap(err, "failed scanning target path") return longtaillib.Longtail_VersionIndex{}, hash, errors.Wrap(err, fname) } } @@ -248,7 +248,7 @@ func updateCurrentVersionFromLongtail( localVersionIndex, sourceVersionIndex) if err != nil { - err = errors.Wrap(err, fmt.Sprintf("Failed to create version diff. `%s` -> `%s`", targetPath, sourceFilePath)) + err = errors.Wrap(err, fmt.Sprintf("failed to create version diff. 
`%s` -> `%s`", targetPath, sourceFilePath)) return localVersionIndex, hash, errors.Wrap(err, fname) } defer versionDiff.Dispose() @@ -257,7 +257,7 @@ func updateCurrentVersionFromLongtail( sourceVersionIndex, versionDiff) if err != nil { - err = errors.Wrap(err, fmt.Sprintf("Failed to get required chunk hashes. `%s` -> `%s`", targetPath, sourceFilePath)) + err = errors.Wrap(err, fmt.Sprintf("failed to get required chunk hashes. `%s` -> `%s`", targetPath, sourceFilePath)) return localVersionIndex, hash, errors.Wrap(err, fname) } @@ -313,7 +313,7 @@ func updateCurrentVersionFromLongtail( localVersionIndex, hash, _, err = targetIndexReader.Get() if err != nil { - err := errors.Wrap(err, "Failed scanning target path") + err := errors.Wrap(err, "failed scanning target path") return longtaillib.Longtail_VersionIndex{}, hash, errors.Wrap(err, fname) } return localVersionIndex, hash, nil @@ -388,7 +388,7 @@ func cloneOneVersion( targetBlockSize, maxChunksPerBlock) if err != nil { - err = errors.Wrap(err, fmt.Sprintf("Failed creating missing content store index for `%s`", targetPath)) + err = errors.Wrap(err, fmt.Sprintf("failed creating missing content store index for `%s`", targetPath)) return targetVersionIndex, errors.Wrap(err, fname) } defer versionMissingStoreIndex.Dispose() @@ -406,7 +406,7 @@ func cloneOneVersion( longtailutils.NormalizePath(targetPath)) writeContentProgress.Dispose() if err != nil { - err = errors.Wrap(err, fmt.Sprintf("Failed writing content from `%s`", targetPath)) + err = errors.Wrap(err, fmt.Sprintf("failed writing content from `%s`", targetPath)) return targetVersionIndex, errors.Wrap(err, fname) } } @@ -434,13 +434,13 @@ func cloneOneVersion( versionLocalStoreIndexPath := strings.Replace(targetFilePath, ".lvi", ".lsi", -1) // TODO: This should use a file with path names instead of this rename hack! 
versionLocalStoreIndex, err := longtaillib.MergeStoreIndex(newExistingStoreIndex, versionMissingStoreIndex) if err != nil { - err = errors.Wrap(err, fmt.Sprintf("Failed merging store index for `%s`", versionLocalStoreIndexPath)) + err = errors.Wrap(err, fmt.Sprintf("failed merging store index for `%s`", versionLocalStoreIndexPath)) return targetVersionIndex, errors.Wrap(err, fname) } versionLocalStoreIndexBuffer, err := longtaillib.WriteStoreIndexToBuffer(versionLocalStoreIndex) versionLocalStoreIndex.Dispose() if err != nil { - err = errors.Wrap(err, fmt.Sprintf("Failed serializing store index for `%s`", versionLocalStoreIndexPath)) + err = errors.Wrap(err, fmt.Sprintf("failed serializing store index for `%s`", versionLocalStoreIndexPath)) return targetVersionIndex, errors.Wrap(err, fname) } err = longtailutils.WriteToURI(versionLocalStoreIndexPath, versionLocalStoreIndexBuffer) diff --git a/commands/cmd_downsync.go b/commands/cmd_downsync.go index d3e6358e..8cca7d09 100644 --- a/commands/cmd_downsync.go +++ b/commands/cmd_downsync.go @@ -24,7 +24,8 @@ func downsync( versionLocalStoreIndexPath string, includeFilterRegEx string, excludeFilterRegEx string, - scanTarget bool) ([]longtailutils.StoreStat, []longtailutils.TimeStat, error) { + scanTarget bool, + cacheTargetIndex bool) ([]longtailutils.StoreStat, []longtailutils.TimeStat, error) { const fname = "downsync" log := logrus.WithFields(logrus.Fields{ "fname": fname, @@ -40,6 +41,7 @@ func downsync( "includeFilterRegEx": includeFilterRegEx, "excludeFilterRegEx": excludeFilterRegEx, "scanTarget": scanTarget, + "cacheTargetIndex": cacheTargetIndex, }) log.Debug(fname) @@ -63,7 +65,7 @@ func downsync( sourceNameSplit := strings.Split(sourceName, ".") resolvedTargetFolderPath = sourceNameSplit[0] if resolvedTargetFolderPath == "" { - err = fmt.Errorf("Unable to resolve target path using `%s` as base", sourceFilePath) + err = fmt.Errorf("unable to resolve target path using `%s` as base", sourceFilePath) return storeStats, timeStats, errors.Wrap(err, fname) } } else { @@ -73,6 +75,18 @@ func downsync( fs := longtaillib.CreateFSStorageAPI() defer fs.Dispose() + if targetIndexPath != "" { + cacheTargetIndex = false + } + + cacheTargetIndexPath := resolvedTargetFolderPath + "/.longtail.index.cache.lvi" + + if cacheTargetIndex { + if longtaillib.FileExists(fs, cacheTargetIndexPath) { + targetIndexPath = cacheTargetIndexPath + } + } + targetFolderScanner := longtailutils.AsyncFolderScanner{} if scanTarget && targetIndexPath == "" { targetFolderScanner.Scan(resolvedTargetFolderPath, pathFilter, fs) @@ -191,6 +205,13 @@ func downsync( getExistingContentTime := time.Since(getExistingContentStartTime) timeStats = append(timeStats, longtailutils.TimeStat{"Get content index", getExistingContentTime}) + if cacheTargetIndex && longtaillib.FileExists(fs, cacheTargetIndexPath) { + err = longtaillib.DeleteFile(fs, cacheTargetIndexPath) + if err != nil { + return storeStats, timeStats, errors.Wrap(err, fname) + } + } + changeVersionStartTime := time.Now() changeVersionProgress := longtailutils.CreateProgress("Updating version", 2) defer changeVersionProgress.Dispose() @@ -334,6 +355,13 @@ func downsync( timeStats = append(timeStats, longtailutils.TimeStat{"Validate", validateTime}) } + if cacheTargetIndex { + err = longtaillib.WriteVersionIndex(fs, sourceVersionIndex, cacheTargetIndexPath) + if err != nil { + return storeStats, timeStats, errors.Wrap(err, fname) + } + } + return storeStats, timeStats, nil } @@ -349,6 +377,7 @@ type DownsyncCmd struct { 
TargetPathIncludeRegExOption TargetPathExcludeRegExOption ScanTargetOption + CacheTargetIndexOption } func (r *DownsyncCmd) Run(ctx *Context) error { @@ -364,7 +393,8 @@ func (r *DownsyncCmd) Run(ctx *Context) error { r.VersionLocalStoreIndexPath, r.IncludeFilterRegEx, r.ExcludeFilterRegEx, - r.ScanTarget) + r.ScanTarget, + r.CacheTargetIndex) ctx.StoreStats = append(ctx.StoreStats, storeStats...) ctx.TimeStats = append(ctx.TimeStats, timeStats...) return err diff --git a/commands/cmd_get.go b/commands/cmd_get.go index 82048f54..78c3e7a5 100644 --- a/commands/cmd_get.go +++ b/commands/cmd_get.go @@ -21,7 +21,8 @@ func get( validate bool, includeFilterRegEx string, excludeFilterRegEx string, - scanTarget bool) ([]longtailutils.StoreStat, []longtailutils.TimeStat, error) { + scanTarget bool, + cacheTargetIndex bool) ([]longtailutils.StoreStat, []longtailutils.TimeStat, error) { const fname = "get" log := logrus.WithFields(logrus.Fields{ "fname": fname, @@ -35,6 +36,7 @@ func get( "includeFilterRegEx": includeFilterRegEx, "excludeFilterRegEx": excludeFilterRegEx, "scanTarget": scanTarget, + "cacheTargetIndex": cacheTargetIndex, }) log.Debug(fname) @@ -57,7 +59,7 @@ func get( blobStoreURI := v.GetString("storage-uri") if blobStoreURI == "" { - err = fmt.Errorf("Missing storage-uri in get-config `%s`", getConfigPath) + err = fmt.Errorf("missing storage-uri in get-config `%s`", getConfigPath) return storeStats, timeStats, errors.Wrap(err, fname) } sourceFilePath := v.GetString("source-path") @@ -85,7 +87,8 @@ func get( versionLocalStoreIndexPath, includeFilterRegEx, excludeFilterRegEx, - scanTarget) + scanTarget, + cacheTargetIndex) storeStats = append(storeStats, downSyncStoreStats...) timeStats = append(timeStats, downSyncTimeStats...) @@ -104,6 +107,7 @@ type GetCmd struct { TargetPathIncludeRegExOption TargetPathExcludeRegExOption ScanTargetOption + CacheTargetIndexOption } func (r *GetCmd) Run(ctx *Context) error { @@ -117,7 +121,8 @@ func (r *GetCmd) Run(ctx *Context) error { r.Validate, r.IncludeFilterRegEx, r.ExcludeFilterRegEx, - r.ScanTarget) + r.ScanTarget, + r.CacheTargetIndex) ctx.StoreStats = append(ctx.StoreStats, storeStats...) ctx.TimeStats = append(ctx.TimeStats, timeStats...) 
return err diff --git a/commands/cmd_pack.go b/commands/cmd_pack.go index a06cb14c..9bbcdda9 100644 --- a/commands/cmd_pack.go +++ b/commands/cmd_pack.go @@ -58,7 +58,7 @@ func pack( sourceNameSplit := strings.Split(sourceName, ".") resolvedTargetPath = sourceNameSplit[0] if resolvedTargetPath == "" { - err = fmt.Errorf("Unable to resolve target path using `%s` as base", sourceFolderPath) + err = fmt.Errorf("unable to resolve target path using `%s` as base", sourceFolderPath) return storeStats, timeStats, errors.Wrap(err, fname) } resolvedTargetPath += ".la" diff --git a/commands/cmd_printstore_test.go b/commands/cmd_printstore_test.go index 3e0d3dd4..6e1bbb74 100644 --- a/commands/cmd_printstore_test.go +++ b/commands/cmd_printstore_test.go @@ -2,41 +2,9 @@ package commands import ( "io/ioutil" - "runtime" "testing" - - "github.com/alecthomas/kong" ) -func runPrintStore(t *testing.T, storeIndexPath string, compact bool, details bool) { - parser, err := kong.New(&Cli) - if err != nil { - t.Errorf("kong.New(Cli) failed with %s", err) - } - args := []string{ - "print-store", - "--store-index-path", storeIndexPath, - } - if compact { - args = append(args, "--compact") - } - if details { - args = append(args, "--details") - } - ctx, err := parser.Parse(args) - if err != nil { - t.Errorf("parser.Parse() failed with %s", err) - } - - context := &Context{ - NumWorkerCount: runtime.NumCPU(), - } - err = ctx.Run(context) - if err != nil { - t.Errorf("ctx.Run(context) failed with %s", err) - } -} - func TestPrintStoreIndex(t *testing.T) { testPath, _ := ioutil.TempDir("", "test") fsBlobPathPrefix := "fsblob://" + testPath diff --git a/commands/cmd_unpack.go b/commands/cmd_unpack.go index 4ce9228e..facbfca7 100644 --- a/commands/cmd_unpack.go +++ b/commands/cmd_unpack.go @@ -17,11 +17,12 @@ func unpack( sourceFilePath string, targetFolderPath string, targetIndexPath string, + retainPermissions bool, + validate bool, includeFilterRegEx string, excludeFilterRegEx string, scanTarget bool, - retainPermissions bool, - validate bool) ([]longtailutils.StoreStat, []longtailutils.TimeStat, error) { + cacheTargetIndex bool) ([]longtailutils.StoreStat, []longtailutils.TimeStat, error) { const fname = "unpack" log := logrus.WithContext(context.Background()).WithFields(logrus.Fields{ "fname": fname, @@ -29,11 +30,12 @@ func unpack( "sourceFilePath": sourceFilePath, "targetFolderPath": targetFolderPath, "targetIndexPath": targetIndexPath, + "retainPermissions": retainPermissions, + "validate": validate, "includeFilterRegEx": includeFilterRegEx, "excludeFilterRegEx": excludeFilterRegEx, "scanTarget": scanTarget, - "retainPermissions": retainPermissions, - "validate": validate, + "cacheTargetIndex": cacheTargetIndex, }) log.Debug(fname) @@ -57,7 +59,7 @@ func unpack( sourceNameSplit := strings.Split(sourceName, ".") resolvedTargetFolderPath = sourceNameSplit[0] if resolvedTargetFolderPath == "" { - err = fmt.Errorf("Unable to resolve target path using `%s` as base", sourceFilePath) + err = fmt.Errorf("unable to resolve target path using `%s` as base", sourceFilePath) return storeStats, timeStats, errors.Wrap(err, fname) } } else { @@ -67,6 +69,18 @@ func unpack( fs := longtaillib.CreateFSStorageAPI() defer fs.Dispose() + if targetIndexPath != "" { + cacheTargetIndex = false + } + + cacheTargetIndexPath := resolvedTargetFolderPath + "/.longtail.index.cache.lvi" + + if cacheTargetIndex { + if longtaillib.FileExists(fs, cacheTargetIndexPath) { + targetIndexPath = cacheTargetIndexPath + } + } + targetFolderScanner := 
longtailutils.AsyncFolderScanner{} if scanTarget && targetIndexPath == "" { targetFolderScanner.Scan(resolvedTargetFolderPath, pathFilter, fs) @@ -148,6 +162,13 @@ func unpack( getVersionDiffTime := time.Since(getVersionDiffStartTime) timeStats = append(timeStats, longtailutils.TimeStat{"Get diff", getVersionDiffTime}) + if cacheTargetIndex && longtaillib.FileExists(fs, cacheTargetIndexPath) { + err = longtaillib.DeleteFile(fs, cacheTargetIndexPath) + if err != nil { + return storeStats, timeStats, errors.Wrap(err, fname) + } + } + changeVersionStartTime := time.Now() changeVersionProgress := longtailutils.CreateProgress("Updating version", 2) defer changeVersionProgress.Dispose() @@ -281,6 +302,13 @@ func unpack( timeStats = append(timeStats, longtailutils.TimeStat{"Validate", validateTime}) } + if cacheTargetIndex { + err = longtaillib.WriteVersionIndex(fs, sourceVersionIndex, cacheTargetIndexPath) + if err != nil { + return storeStats, timeStats, errors.Wrap(err, fname) + } + } + return storeStats, timeStats, nil } @@ -288,11 +316,12 @@ type UnpackCmd struct { SourcePath string `name:"source-path" help:"Source folder path" required:""` TargetPath string `name:"target-path" help:"Target file uri"` TargetIndexPath string `name:"target-index-path" help:"Optional pre-computed index of target-path"` + RetainPermissionsOption + ValidateTargetOption TargetPathIncludeRegExOption TargetPathExcludeRegExOption ScanTargetOption - RetainPermissionsOption - ValidateTargetOption + CacheTargetIndexOption } func (r *UnpackCmd) Run(ctx *Context) error { @@ -301,11 +330,12 @@ func (r *UnpackCmd) Run(ctx *Context) error { r.SourcePath, r.TargetPath, r.TargetIndexPath, + r.RetainPermissions, + r.Validate, r.IncludeFilterRegEx, r.ExcludeFilterRegEx, r.ScanTarget, - r.RetainPermissions, - r.Validate) + r.CacheTargetIndex) ctx.StoreStats = append(ctx.StoreStats, storeStats...) ctx.TimeStats = append(ctx.TimeStats, timeStats...) return err diff --git a/commands/commands_test.go b/commands/commands_test.go index af8556d2..85647032 100644 --- a/commands/commands_test.go +++ b/commands/commands_test.go @@ -65,7 +65,9 @@ func validateContent(t *testing.T, baseURI string, path string, content map[stri } foundItems[n] = string(b) } else { - t.Errorf("Unexpected file `%s`", n) + if n != ".longtail.index.cache.lvi" { + t.Errorf("Unexpected file `%s`", n) + } } } if len(foundItems) != len(content) { diff --git a/commands/options.go b/commands/options.go index 6b13df59..0bd5061b 100644 --- a/commands/options.go +++ b/commands/options.go @@ -95,3 +95,7 @@ type TargetBlockSizeOption struct { type ScanTargetOption struct { ScanTarget bool `name:"scan-target" help:"Enables scanning of target folder before write. 
Disable it to only add/write content to a folder" default:"true" negatable:""` } + +type CacheTargetIndexOption struct { + CacheTargetIndex bool `name:"cache-target-index" help:"Stores a copy of the version index for the target folder and uses it if it exists, skipping folder scanning" default:"true" negatable:""` +} diff --git a/longtaillib/longtail/liblongtail_darwin_x64.a b/longtaillib/longtail/liblongtail_darwin_x64.a index 436f37a5..6f7e1658 100644 Binary files a/longtaillib/longtail/liblongtail_darwin_x64.a and b/longtaillib/longtail/liblongtail_darwin_x64.a differ diff --git a/longtaillib/longtail/liblongtail_linux_x64.a b/longtaillib/longtail/liblongtail_linux_x64.a index af867380..6c495852 100644 Binary files a/longtaillib/longtail/liblongtail_linux_x64.a and b/longtaillib/longtail/liblongtail_linux_x64.a differ diff --git a/longtaillib/longtail/liblongtail_win32_x64.a b/longtaillib/longtail/liblongtail_win32_x64.a index 3f0c6d1e..1d34c48f 100644 Binary files a/longtaillib/longtail/liblongtail_win32_x64.a and b/longtaillib/longtail/liblongtail_win32_x64.a differ diff --git a/longtaillib/longtaillib.go b/longtaillib/longtaillib.go index b077d8ae..9c0854da 100644 --- a/longtaillib/longtaillib.go +++ b/longtaillib/longtaillib.go @@ -103,7 +103,7 @@ func errnoToError(err C.int) error { if err == 0 { return nil } - description, _ := errnoToDescription[int(err)] + description := errnoToDescription[int(err)] return &longtailError{Errno: err, Description: description} } @@ -1654,6 +1654,27 @@ func ReadVersionIndex(storageAPI Longtail_StorageAPI, path string) (Longtail_Ver return Longtail_VersionIndex{cVersionIndex: vindex}, nil } +func FileExists(storageAPI Longtail_StorageAPI, path string) bool { + const fname = "FileExists" + + cPath := C.CString(path) + defer C.free(unsafe.Pointer(cPath)) + exists := C.Longtail_Storage_IsFile(storageAPI.cStorageAPI, cPath) + return exists != 0 +} + +func DeleteFile(storageAPI Longtail_StorageAPI, path string) error { + const fname = "DeleteFile" + + cPath := C.CString(path) + defer C.free(unsafe.Pointer(cPath)) + errno := C.Longtail_Storage_RemoveFile(storageAPI.cStorageAPI, cPath) + if errno != 0 { + return errors.Wrap(errnoToError(errno), fname) + } + return nil +} + // CreateStoreIndexFromBlocks ... 
func CreateStoreIndexFromBlocks(blockIndexes []Longtail_BlockIndex) (Longtail_StoreIndex, error) { const fname = "CreateStoreIndexFromBlocks" diff --git a/longtaillib/longtaillib_test.go b/longtaillib/longtaillib_test.go index d205ea6d..e58ab51d 100644 --- a/longtaillib/longtaillib_test.go +++ b/longtaillib/longtaillib_test.go @@ -137,7 +137,7 @@ func TestInMemStorage(t *testing.T) { t.Errorf("ReadFromStorage() %s", err) } testString := string(rbytes) - if myString != myString { + if myString != testString { t.Errorf("ReadFromStorage() %s != %s", rbytes, testString) } } @@ -238,7 +238,7 @@ func validateStoredBlock(t *testing.T, storedBlock Longtail_StoredBlock, hashIde t.Errorf("validateStoredBlock() %d != %d", chunkCount, uint32(len(chunkSizes))) } blockOffset := uint32(0) - for index, _ := range chunkHashes { + for index := range chunkHashes { if chunkHashes[index] != uint64(index+1)*4711 { t.Errorf("validateStoredBlock() %d != %d", uint64(index)*4711, chunkHashes[index]) } @@ -252,7 +252,7 @@ func validateStoredBlock(t *testing.T, storedBlock Longtail_StoredBlock, hashIde t.Errorf("validateStoredBlock() %d != %d", uint32(len(blockData)), blockOffset) } blockOffset = 0 - for chunkIndex, _ := range chunkHashes { + for chunkIndex := range chunkHashes { for index := uint32(0); index < uint32(chunkSizes[chunkIndex]); index++ { if blockData[blockOffset+index] != uint8(chunkIndex+1) { t.Errorf("validateStoredBlock() %d != %d", uint8(chunkIndex+1), blockData[blockOffset+index]) @@ -588,7 +588,7 @@ func (b *TestBlockStore) PruneBlocks( keepMap[b] = true } var removeBlocks []uint64 - for h, _ := range b.blocks { + for h := range b.blocks { if _, exists := keepMap[h]; exists { continue } @@ -899,67 +899,67 @@ func TestWriteContent(t *testing.T) { } defer versionIndex.Dispose() - if 0 == versionIndex.GetVersion() { + if versionIndex.GetVersion() == 0 { t.Errorf("TestWriteContent() GetVersion() %d", versionIndex.GetVersion()) } - if 0 == versionIndex.GetHashIdentifier() { + if versionIndex.GetHashIdentifier() == 0 { t.Errorf("TestWriteContent() GetHashIdentifier() %d", versionIndex.GetHashIdentifier()) } - if 0 == versionIndex.GetTargetChunkSize() { + if versionIndex.GetTargetChunkSize() == 0 { t.Errorf("TestWriteContent() GetTargetChunkSize() %d", versionIndex.GetTargetChunkSize()) } - if 0 == versionIndex.GetAssetCount() { + if versionIndex.GetAssetCount() == 0 { t.Errorf("TestWriteContent() GetAssetCount() %d", versionIndex.GetAssetCount()) } - if "" == versionIndex.GetAssetPath(0) { + if versionIndex.GetAssetPath(0) == "" { t.Errorf("TestWriteContent() GetAssetPath(0) %s", versionIndex.GetAssetPath(0)) } - if nil == versionIndex.GetAssetHashes() { + if versionIndex.GetAssetHashes() == nil { t.Errorf("TestWriteContent() GetAssetHashes() %q", versionIndex.GetAssetHashes()) } - if 0xffffffffffffffff == versionIndex.GetAssetSize(0) { + if versionIndex.GetAssetSize(0) == 0xffffffffffffffff { t.Errorf("TestWriteContent() versionIndex.GetAssetSize(0) %d", versionIndex.GetAssetSize(0)) } - if 0xffff == versionIndex.GetAssetPermissions(0) { + if versionIndex.GetAssetPermissions(0) == 0xffff { t.Errorf("TestWriteContent() versionIndex.GetAssetPermissions(0) %d", versionIndex.GetAssetPermissions(0)) } - if nil == versionIndex.GetAssetChunkCounts() { + if versionIndex.GetAssetChunkCounts() == nil { t.Errorf("TestWriteContent() versionIndex.GetAssetChunkCounts() %q", versionIndex.GetAssetChunkCounts()) } - if nil == versionIndex.GetAssetChunkIndexStarts() { + if versionIndex.GetAssetChunkIndexStarts() == nil 
{ t.Errorf("TestWriteContent() GetAssetChunkIndexStarts() %q", versionIndex.GetAssetChunkIndexStarts()) } - if nil == versionIndex.GetAssetChunkIndexes() { + if versionIndex.GetAssetChunkIndexes() == nil { t.Errorf("TestWriteContent() GetAssetChunkIndexes() %q", versionIndex.GetAssetChunkIndexes()) } - if 0 == versionIndex.GetChunkCount() { + if versionIndex.GetChunkCount() == 0 { t.Errorf("TestWriteContent() GetChunkCount() %d", versionIndex.GetChunkCount()) } - if nil == versionIndex.GetChunkHashes() { + if versionIndex.GetChunkHashes() == nil { t.Errorf("TestWriteContent() GetChunkHashes() %q", versionIndex.GetChunkHashes()) } - if nil == versionIndex.GetChunkSizes() { + if versionIndex.GetChunkSizes() == nil { t.Errorf("TestWriteContent() GetChunkSizes() %q", versionIndex.GetChunkSizes()) } - if nil == versionIndex.GetAssetSizes() { + if versionIndex.GetAssetSizes() == nil { t.Errorf("TestWriteContent() GetAssetSizes() %q", versionIndex.GetAssetSizes()) } - if nil == versionIndex.GetChunkTags() { + if versionIndex.GetChunkTags() == nil { t.Errorf("TestWriteContent() GetChunkTags() %q", versionIndex.GetChunkTags()) }