From 44f20f5f45faa2758d6a69176f9ca3cbc87ec8cc Mon Sep 17 00:00:00 2001
From: Ivan Bobev
Date: Wed, 8 Sep 2021 19:53:02 +0300
Subject: [PATCH] Implement async download based on chronos

- implementation of async download based on chronos asyncproc
- the PR is based on #938
---
 .github/workflows/test.yml      |   3 +
 .gitignore                      |   3 +
 .gitmodules                     |   9 +
 changelog.markdown              |   1 +
 nim-bearssl                     |   1 +
 nim-chronos                     |   1 +
 nim-stew                        |   1 +
 nimble.nimble                   |   5 +-
 src/{nimble.nim.cfg => nim.cfg} |   4 +
 src/nimble.nim                  | 282 +++++++++++++++++++++++---
 src/nimblepkg/download.nim      | 200 ++++++++++++----------
 src/nimblepkg/lockfile.nim      |   2 +-
 src/nimblepkg/options.nim       |   9 +
 src/nimblepkg/sha1hashes.nim    |   2 +
 src/nimblepkg/tools.nim         |  32 +++-
 tests/config.nims               |   2 +
 tests/nim.cfg                   |   4 +
 tests/testscommon.nim           |   2 +-
 18 files changed, 390 insertions(+), 173 deletions(-)
 create mode 100644 .gitmodules
 create mode 160000 nim-bearssl
 create mode 160000 nim-chronos
 create mode 160000 nim-stew
 rename src/{nimble.nim.cfg => nim.cfg} (68%)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index fccfde598..8cdd96bea 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -27,6 +27,9 @@ jobs:
       - name: Install Mercurial on macOS
         if: matrix.os == 'macos-latest'
         run: brew install mercurial
+      - name: setup submodules
+        run: |
+          git submodule update --init
       - name: Run nim c -r tester
         run: |
           cd tests
diff --git a/.gitignore b/.gitignore
index ec6622fda..80a58a30f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -62,3 +62,6 @@ src/nimblepkg/version
 # Test procedure artifacts
 *.nims
 /buildTests
+/nimble.develop
+nimble.paths
+*.paths
diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 000000000..1d73a759d
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,9 @@
+[submodule "nim-bearssl"]
+	path = nim-bearssl
+	url = https://github.com/status-im/nim-bearssl
+[submodule "nim-chronos"]
+	path = nim-chronos
+	url = https://github.com/status-im/nim-chronos
+[submodule "nim-stew"]
+	path = nim-stew
+	url = https://github.com/status-im/nim-stew
diff --git a/changelog.markdown b/changelog.markdown
index 75ee28804..417741950 100644
--- a/changelog.markdown
+++ b/changelog.markdown
@@ -12,6 +12,7 @@ This is a major release containing four new features:
 - Download tarballs when downloading packages from GitHub.
 - A setup command.
 - Added a `--package, -p` command line option.
+- Parallel downloads of the locked dependencies.
## 0.13.0 diff --git a/nim-bearssl b/nim-bearssl new file mode 160000 index 000000000..f4c4233de --- /dev/null +++ b/nim-bearssl @@ -0,0 +1 @@ +Subproject commit f4c4233de453cb7eac0ce3f3ffad6496295f83ab diff --git a/nim-chronos b/nim-chronos new file mode 160000 index 000000000..f700a9315 --- /dev/null +++ b/nim-chronos @@ -0,0 +1 @@ +Subproject commit f700a9315bf66c002cd6587701c609034491e645 diff --git a/nim-stew b/nim-stew new file mode 160000 index 000000000..018760954 --- /dev/null +++ b/nim-stew @@ -0,0 +1 @@ +Subproject commit 018760954a1530b7336aed7133393908875d860f diff --git a/nimble.nimble b/nimble.nimble index d56edcf57..1e8b3133f 100644 --- a/nimble.nimble +++ b/nimble.nimble @@ -11,7 +11,8 @@ installExt = @["nim"] # Dependencies -requires "nim >= 0.13.0" +requires "nim >= 0.13.0", "chronos", "bearssl", "stew" + when defined(nimdistros): import distros @@ -22,4 +23,4 @@ when defined(nimdistros): task test, "Run the Nimble tester!": withDir "tests": - exec "nim c -r tester" + exec "nim c -r -d:release tester" diff --git a/src/nimble.nim.cfg b/src/nim.cfg similarity index 68% rename from src/nimble.nim.cfg rename to src/nim.cfg index e45905529..8918f434e 100644 --- a/src/nimble.nim.cfg +++ b/src/nim.cfg @@ -1,6 +1,10 @@ --path:"$lib/packages/docutils" #--noNimblePath +--threads:off --path:"$nim/" --path:"./vendor/nim" +--path:"../nim-stew" +--path:"../nim-chronos" +--path:"../nim-bearssl" -d:ssl -d:nimcore # Enable 'gorge' in Nim's VM. See https://github.com/nim-lang/Nim/issues/8096 diff --git a/src/nimble.nim b/src/nimble.nim index 25a54790e..e94ab95ff 100644 --- a/src/nimble.nim +++ b/src/nimble.nim @@ -8,6 +8,7 @@ import os, tables, strtabs, json, algorithm, sets, uri, sugar, sequtils, osproc, import std/options as std_opt +import chronos except Error import strutils except toLower from unicode import toLower @@ -27,6 +28,9 @@ const gitIgnoreFileName = ".gitignore" hgIgnoreFileName = ".hgignore" +type + DownloadInfo = (DownloadMethod, string, Table[string, string]) + proc refresh(options: Options) = ## Downloads the package list from the specified URL. ## @@ -64,10 +68,66 @@ proc initPkgList(pkgInfo: PackageInfo, options: Options): seq[PackageInfo] = {.warning[ProveInit]: on.} proc install(packages: seq[PkgTuple], options: Options, - doPrompt, first, fromLockFile: bool): PackageDependenciesInfo + doPrompt, first, fromLockFile: bool): + Future[PackageDependenciesInfo] {.async.} + +proc getDownloadInfo(pv: PkgTuple, options: Options, doPrompt: bool, ignorePackageCache = false): + DownloadInfo + +proc checkForAlreadyInstalledPkg(dep: PkgTuple, pkgList: seq[PackageInfo], + resolvedDep: var PkgTuple, pkg: var PackageInfo, options: Options): bool = + resolvedDep = dep.resolveAlias(options) + display("Checking", "for $1" % $resolvedDep, priority = MediumPriority) + result = findPkg(pkgList, resolvedDep, pkg) + # Check if the original name exists. 
+ if not result and resolvedDep.name != dep.name: + display("Checking", "for $1" % $dep, priority = MediumPriority) + result = findPkg(pkgList, dep, pkg) + if result: + displayWarning(&"Installed package {dep.name} should be renamed to " & + resolvedDep.name) proc processFreeDependencies(pkgInfo: PackageInfo, options: Options): - HashSet[PackageInfo] = + Future[HashSet[PackageInfo]] {.async.} + +proc processFreeDependenciesOfAlreadyInstalledPkg( + resultSet: ptr HashSet[PackageInfo], + reverseDependencies: ptr seq[PackageBasicInfo], + dep: PkgTuple, pkg: PackageInfo, options: Options): Future[void] {.async.} = + displayInfo(pkgDepsAlreadySatisfiedMsg(dep)) + resultSet[].incl pkg + # Process the dependencies of this dependency. + resultSet[].incl await processFreeDependencies( + pkg.toFullInfo(options), options) + if not pkg.isLink: + reverseDependencies[].add(pkg.basicInfo) + +proc addDepsToResultSet(deps: HashSet[PackageInfo], + resultSet: ptr HashSet[PackageInfo]) = + for dep in deps: + if resultSet[].contains dep: + # If the package already exists in the result set we had to merge its + # special versions set into the special versions set of the existing + # one. + resultSet[][dep].metaData.specialVersions.incl( + dep.metaData.specialVersions) + else: + resultSet[].incl dep + +proc awaitInstallFutures(futures: seq[Future[PackageDependenciesInfo]], + pkgList: ptr seq[PackageInfo], + reverseDependencies: ptr seq[PackageBasicInfo], + resultSet: ptr HashSet[PackageInfo]): + Future[void] {.async.} = + var installResults = if futures.len > 0: await all(futures) else: @[] + for (deps, pkg) in mitems(installResults): + addDepsToResultSet(deps, resultSet) + fillMetaData(pkg, pkg.getRealDir(), false) + pkgList[].add pkg + reverseDependencies[].add(pkg.basicInfo) + +proc processFreeDependencies(pkgInfo: PackageInfo, options: Options): + Future[HashSet[PackageInfo]] {.async.} = ## Verifies and installs dependencies. ## ## Returns set of PackageInfo (for paths) to pass to the compiler @@ -83,7 +143,13 @@ proc processFreeDependencies(pkgInfo: PackageInfo, options: Options): [pkgInfo.basicInfo.name, $pkgInfo.basicInfo.version], priority = HighPriority) - var reverseDependencies: seq[PackageBasicInfo] = @[] + var + reverseDependencies: seq[PackageBasicInfo] = @[] + installFutures {.global.}: + Table[string, seq[Future[PackageDependenciesInfo]]] + currentlyWaitingFutures: seq[Future[PackageDependenciesInfo]] + dependenciesToCheckAgain: seq[PkgTuple] + for dep in pkgInfo.requires: if dep.name == "nimrod" or dep.name == "nim": let nimVer = getNimrodVersion(options) @@ -91,46 +157,55 @@ proc processFreeDependencies(pkgInfo: PackageInfo, options: Options): let msg = "Unsatisfied dependency: " & dep.name & " (" & $dep.ver & ")" raise nimbleError(msg) else: - let resolvedDep = dep.resolveAlias(options) - display("Checking", "for $1" % $resolvedDep, priority = MediumPriority) - var pkg = initPackageInfo() - var found = findPkg(pkgList, resolvedDep, pkg) - # Check if the original name exists. 
- if not found and resolvedDep.name != dep.name: - display("Checking", "for $1" % $dep, priority = MediumPriority) - found = findPkg(pkgList, dep, pkg) - if found: - displayWarning(&"Installed package {dep.name} should be renamed to " & - resolvedDep.name) - + var + pkg = initPackageInfo() + resolvedDep: PkgTuple + let found = checkForAlreadyInstalledPkg( + dep, pkgList, resolvedDep, pkg, options) if not found: - display("Installing", $resolvedDep, priority = HighPriority) - let toInstall = @[(resolvedDep.name, resolvedDep.ver)] - let (packages, installedPkg) = install(toInstall, options, - doPrompt = false, first = false, fromLockFile = false) - - for pkg in packages: - if result.contains pkg: - # If the result already contains the newly tried to install package - # we had to merge its special versions set into the set of the old - # one. - result[pkg].metaData.specialVersions.incl( - pkg.metaData.specialVersions) - else: - result.incl pkg - - pkg = installedPkg # For addRevDep - fillMetaData(pkg, pkg.getRealDir(), false) - - # This package has been installed so we add it to our pkgList. - pkgList.add pkg + let + (_, resolvedDepUrl, metadata) = getDownloadInfo(resolvedDep, options, true) + subdir = metadata.getOrDefault("subdir") + url = resolvedDepUrl.removeTrailingGitString & (if subdir != "": "#" & subdir else: "") + + if installFutures.hasKey url: + currentlyWaitingFutures.add installFutures[url] + dependenciesToCheckAgain.add resolvedDep + else: + display("Installing", $resolvedDep, priority = HighPriority) + let future = install(@[resolvedDep], options, + doPrompt = false, first = false, fromLockFile = false) + installFutures[url] = @[future] + currentlyWaitingFutures.add future else: - displayInfo(pkgDepsAlreadySatisfiedMsg(dep)) - result.incl pkg - # Process the dependencies of this dependency. - result.incl processFreeDependencies(pkg.toFullInfo(options), options) - if not pkg.isLink: - reverseDependencies.add(pkg.basicInfo) + await processFreeDependenciesOfAlreadyInstalledPkg( + result.addr, reverseDependencies.addr, dep, pkg, options) + + await awaitInstallFutures(currentlyWaitingFutures, + pkgList.addr, reverseDependencies.addr, result.addr) + + currentlyWaitingFutures.setLen(0) + + for dep in dependenciesToCheckAgain: + var + pkg = initPackageInfo() + resolvedDep: PkgTuple + let found = checkForAlreadyInstalledPkg( + dep, pkgList, resolvedDep, pkg, options) + if not found: + let(_, resolvedDepUrl, _) = getDownloadInfo(resolvedDep, options, true) + let url = resolvedDepUrl.removeTrailingGitString + display("Installing", $resolvedDep, priority = HighPriority) + let future = install(@[resolvedDep], options, + doPrompt = false, first = false, fromLockFile = false) + installFutures[url].add future + currentlyWaitingFutures.add future + else: + await processFreeDependenciesOfAlreadyInstalledPkg( + result.addr, reverseDependencies.addr, dep, pkg, options) + + await awaitInstallFutures(currentlyWaitingFutures, + pkgList.addr, reverseDependencies.addr, result.addr) # Check if two packages of the same name (but different version) are listed # in the path. 
@@ -327,12 +402,12 @@ proc processAllDependencies(pkgInfo: PackageInfo, options: Options): if pkgInfo.lockedDeps.len > 0: pkgInfo.processLockedDependencies(options) else: - pkgInfo.processFreeDependencies(options) + waitFor pkgInfo.processFreeDependencies(options) proc installFromDir(dir: string, requestedVer: VersionRange, options: Options, url: string, first: bool, fromLockFile: bool, vcsRevision = notSetSha1Hash): - PackageDependenciesInfo = + Future[PackageDependenciesInfo] {.async.} = ## Returns where package has been installed to, together with paths ## to the packages this package depends on. ## @@ -375,7 +450,7 @@ proc installFromDir(dir: string, requestedVer: VersionRange, options: Options, if first and pkgInfo.lockedDeps.len > 0: result.deps = pkgInfo.processLockedDependencies(depsOptions) elif not fromLockFile: - result.deps = pkgInfo.processFreeDependencies(depsOptions) + result.deps = await pkgInfo.processFreeDependencies(depsOptions) if options.depsOnly: result.pkg = pkgInfo @@ -510,14 +585,21 @@ proc getDependency(name: string, dep: LockFileDep, options: Options): getInstalledPackageMin(depDirName, nimbleFilePath).toFullInfo(options) type - DownloadInfo = ref object + LockedDepDownloadInfo = ref object ## Information for a downloaded dependency needed for installation. name: string dependency: LockFileDep url: string version: VersionRange downloadDir: string - vcsRevision: Sha1Hash + vcsRevision: Sha1HashRef + + DownloadQueue = ref seq[tuple[name: string, dep: LockFileDep]] + ## A queue of dependencies from the lock file which to be downloaded. + + DownloadResults = ref seq[LockedDepDownloadInfo] + ## A list of `LockedDepDownloadInfo` objects used for installing the + ## downloaded dependencies. proc developWithDependencies(options: Options): bool = ## Determines whether the current executed action is a develop sub-command @@ -530,7 +612,7 @@ proc raiseCannotCloneInExistingDirException(downloadDir: string) = raise nimbleError(msg, hint) proc downloadDependency(name: string, dep: LockFileDep, options: Options): - DownloadInfo = + Future[LockedDepDownloadInfo] {.async.} = ## Downloads a dependency from the lock file. 
if options.offline: raise nimbleError("Cannot download in offline mode.") @@ -551,18 +633,18 @@ proc downloadDependency(name: string, dep: LockFileDep, options: Options): if options.developWithDependencies: displayWarning(skipDownloadingInAlreadyExistingDirectoryMsg( downloadPath, name)) - result = DownloadInfo( + result = LockedDepDownloadInfo( name: name, dependency: dep, url: url, version: version, downloadDir: downloadPath, - vcsRevision: dep.vcsRevision) + vcsRevision: dep.vcsRevision.newClone) return else: raiseCannotCloneInExistingDirException(downloadPath) - let (downloadDir, _, vcsRevision) = downloadPkg( + let (downloadDir, _, vcsRevision) = await downloadPkg( url, version, dep.downloadMethod, subdir, options, downloadPath, dep.vcsRevision) @@ -571,7 +653,7 @@ proc downloadDependency(name: string, dep: LockFileDep, options: Options): raise checksumError(name, dep.version, dep.vcsRevision, downloadedPackageChecksum, dep.checksums.sha1) - result = DownloadInfo( + result = LockedDepDownloadInfo( name: name, dependency: dep, url: url, @@ -579,17 +661,17 @@ proc downloadDependency(name: string, dep: LockFileDep, options: Options): downloadDir: downloadDir, vcsRevision: vcsRevision) -proc installDependency(pkgInfo: PackageInfo, downloadInfo: DownloadInfo, +proc installDependency(pkgInfo: PackageInfo, downloadInfo: LockedDepDownloadInfo, options: Options): PackageInfo = ## Installs an already downloaded dependency of the package `pkgInfo`. - let (_, newlyInstalledPkgInfo) = installFromDir( + let (_, newlyInstalledPkgInfo) = waitFor installFromDir( downloadInfo.downloadDir, downloadInfo.version, options, downloadInfo.url, first = false, fromLockFile = true, - downloadInfo.vcsRevision) + downloadInfo.vcsRevision[]) downloadInfo.downloadDir.removeDir @@ -601,6 +683,31 @@ proc installDependency(pkgInfo: PackageInfo, downloadInfo: DownloadInfo, return newlyInstalledPkgInfo +proc startDownloadWorker(queue: DownloadQueue, options: Options, + downloadResults: DownloadResults) {.async.} = + ## Starts a new download worker. + while queue[].len > 0: + let download = queue[].pop + let index = queue[].len + downloadResults[index] = await downloadDependency( + download.name, download.dep, options) + +proc lockedDepsDownload(dependenciesToDownload: DownloadQueue, + options: Options): DownloadResults = + ## By given queue with dependencies to download performs the downloads and + ## returns the result objects. 
+ + result.new + result[].setLen(dependenciesToDownload[].len) + + var downloadWorkers: seq[Future[void]] + let workersCount = min( + options.maxParallelDownloads, dependenciesToDownload[].len) + for i in 0 ..< workersCount: + downloadWorkers.add startDownloadWorker( + dependenciesToDownload, options, result) + waitFor all(downloadWorkers) + proc processLockedDependencies(pkgInfo: PackageInfo, options: Options): HashSet[PackageInfo] = # Returns a hash set with `PackageInfo` of all packages from the lock file of @@ -611,20 +718,27 @@ proc processLockedDependencies(pkgInfo: PackageInfo, options: Options): let developModeDeps = getDevelopDependencies(pkgInfo, options) + var dependenciesToDownload: DownloadQueue + dependenciesToDownload.new + for name, dep in pkgInfo.lockedDeps: if developModeDeps.hasKey(name): result.incl developModeDeps[name][] elif isInstalled(name, dep, options): result.incl getDependency(name, dep, options) elif not options.offline: - let downloadResult = downloadDependency(name, dep, options) - result.incl installDependency(pkgInfo, downloadResult, options) + dependenciesToDownload[].add (name, dep) else: raise nimbleError("Unsatisfied dependency: " & pkgInfo.basicInfo.name) -proc getDownloadInfo*(pv: PkgTuple, options: Options, - doPrompt: bool, ignorePackageCache = false): (DownloadMethod, string, - Table[string, string]) = + let downloadResults = lockedDepsDownload(dependenciesToDownload, options) + for downloadResult in downloadResults[]: + result.incl installDependency(pkgInfo, downloadResult, options) + +proc getDownloadInfo(pv: PkgTuple, + options: Options, + doPrompt: bool, + ignorePackageCache = false): DownloadInfo = if pv.name.isURL: let (url, metadata) = getUrlData(pv.name) return (checkUrlType(url), url, metadata) @@ -650,7 +764,8 @@ proc getDownloadInfo*(pv: PkgTuple, options: Options, raise nimbleError(pkgNotFoundMsg(pv)) proc install(packages: seq[PkgTuple], options: Options, - doPrompt, first, fromLockFile: bool): PackageDependenciesInfo = + doPrompt, first, fromLockFile: bool): + Future[PackageDependenciesInfo] {.async.} = ## ``first`` ## True if this is the first level of the indirect recursion. ## ``fromLockFile`` @@ -662,19 +777,19 @@ proc install(packages: seq[PkgTuple], options: Options, displayWarning( "Installing a package which currently has develop mode dependencies." & "\nThey will be ignored and installed as normal packages.") - result = installFromDir(currentDir, newVRAny(), options, "", first, - fromLockFile) + result = await installFromDir(currentDir, newVRAny(), options, "", first, + fromLockFile) else: # Install each package. for pv in packages: let (meth, url, metadata) = getDownloadInfo(pv, options, doPrompt) let subdir = metadata.getOrDefault("subdir") let (downloadDir, downloadVersion, vcsRevision) = - downloadPkg(url, pv.ver, meth, subdir, options, - downloadPath = "", vcsRevision = notSetSha1Hash) + await downloadPkg(url, pv.ver, meth, subdir, options, + downloadPath = "", vcsRevision = notSetSha1Hash) try: - result = installFromDir(downloadDir, pv.ver, options, url, - first, fromLockFile, vcsRevision) + result = await installFromDir(downloadDir, pv.ver, options, url, + first, fromLockFile, vcsRevision[]) except BuildFailed as error: # The package failed to build. # Check if we tried building a tagged version of the package. 
@@ -690,8 +805,8 @@ proc install(packages: seq[PkgTuple], options: Options, [pv.name, $downloadVersion]) if promptResult: let toInstall = @[(pv.name, headVer.toVersionRange())] - result = install(toInstall, options, doPrompt, first, - fromLockFile = false) + result = await install(toInstall, options, doPrompt, first, + fromLockFile = false) else: raise buildFailed( "Aborting installation due to build failure.", details = error) @@ -809,7 +924,7 @@ proc search(options: Options) = onFound() if not found: - display("Error", "No package found.", Error, HighPriority) + display("Error", "No package found.", DisplayType.Error, HighPriority) proc list(options: Options) = if needsRefresh(options): @@ -1259,8 +1374,8 @@ proc installDevelopPackage(pkgTup: PkgTuple, options: var Options): else: pkgTup.ver - discard downloadPkg(url, ver, meth, subdir, options, downloadDir, - vcsRevision = notSetSha1Hash) + discard waitFor downloadPkg(url, ver, meth, subdir, options, downloadDir, + vcsRevision = notSetSha1Hash) let pkgDir = downloadDir / subdir var pkgInfo = getPkgInfo(pkgDir, options) @@ -1274,12 +1389,18 @@ proc developLockedDependencies(pkgInfo: PackageInfo, alreadyDownloaded: var HashSet[string], options: var Options) = ## Downloads for develop the dependencies from the lock file. + var dependenciesToDownload: DownloadQueue + dependenciesToDownload.new + for name, dep in pkgInfo.lockedDeps: if dep.url.removeTrailingGitString notin alreadyDownloaded: - let downloadResult = downloadDependency(name, dep, options) - alreadyDownloaded.incl downloadResult.url.removeTrailingGitString - options.action.devActions.add( - (datAdd, downloadResult.downloadDir.normalizedPath)) + dependenciesToDownload[].add (name, dep) + + let downloadResults = lockedDepsDownload(dependenciesToDownload, options) + for downloadResult in downloadResults[]: + alreadyDownloaded.incl downloadResult.url.removeTrailingGitString + options.action.devActions.add( + (datAdd, downloadResult.downloadDir.normalizedPath)) proc check(alreadyDownloaded: HashSet[string], dep: PkgTuple, options: Options): bool = @@ -1595,7 +1716,7 @@ proc lock(options: Options) = let doesLockFileExist = displayLockOperationStart(currentDir) var errors = validateDevModeDepsWorkingCopiesBeforeLock(pkgInfo, options) - let dependencies = pkgInfo.processFreeDependencies(options).map( + let dependencies = (waitFor pkgInfo.processFreeDependencies(options)).map( pkg => pkg.toFullInfo(options)).toSeq pkgInfo.validateDevelopDependenciesVersionRanges(dependencies, options) var dependencyGraph = buildDependencyGraph(dependencies, options) @@ -1918,10 +2039,10 @@ proc doAction(options: var Options) = of actionRefresh: refresh(options) of actionInstall: - let (_, pkgInfo) = install(options.action.packages, options, - doPrompt = true, - first = true, - fromLockFile = false) + let (_, pkgInfo) = waitFor install(options.action.packages, options, + doPrompt = true, + first = true, + fromLockFile = false) if options.action.packages.len == 0: nimScriptHint(pkgInfo) if pkgInfo.foreignDeps.len > 0: @@ -2008,7 +2129,6 @@ when isMainModule: except CatchableError as error: exitCode = QuitFailure displayTip() - echo error.getStackTrace() displayError(error) finally: try: diff --git a/src/nimblepkg/download.nim b/src/nimblepkg/download.nim index 2797b1eeb..7795a7e8f 100644 --- a/src/nimblepkg/download.nim +++ b/src/nimblepkg/download.nim @@ -2,7 +2,8 @@ # BSD License. Look at license.txt for more info. 
import parseutils, os, osproc, strutils, tables, pegs, uri, strformat, - httpclient, json, sequtils + json, sequtils, chronos/asyncproc, chronos, + chronos/apps/http/httpclient, stew/[base10, byteutils] from algorithm import SortOrder, sorted @@ -13,47 +14,52 @@ type DownloadPkgResult* = tuple dir: string version: Version - vcsRevision: Sha1Hash + vcsRevision: Sha1HashRef -proc updateSubmodules(dir: string) = - discard tryDoCmdEx( - &"git -C {dir} submodule update --init --recursive --depth 1") +proc updateSubmodules(downloadDir: string) {.async.} = + discard await tryDoCmdExAsync("git", + @["-C", downloadDir, "submodule", "update", "--recursive", "--depth", "1"]) -proc doCheckout(meth: DownloadMethod, downloadDir, branch: string) = +proc doCheckout(meth: DownloadMethod, downloadDir, branch: string): + Future[void] {.async.} = case meth of DownloadMethod.git: # Force is used here because local changes may appear straight after a clone # has happened. Like in the case of git on Windows where it messes up the # damn line endings. - discard tryDoCmdEx(&"git -C {downloadDir} checkout --force {branch}") - downloadDir.updateSubmodules + + discard await tryDoCmdExAsync("git", + @["-C", downloadDir, "checkout", "--force", branch]) + await downloadDir.updateSubmodules of DownloadMethod.hg: - discard tryDoCmdEx(&"hg --cwd {downloadDir} checkout {branch}") + discard await tryDoCmdExAsync("hg", + @["--cwd", downloadDir, "checkout", branch]) proc doClone(meth: DownloadMethod, url, downloadDir: string, branch = "", - onlyTip = true) = + onlyTip = true) {.async.} = case meth of DownloadMethod.git: let - depthArg = if onlyTip: "--depth 1" else: "" - branchArg = if branch == "": "" else: &"-b {branch}" - discard tryDoCmdEx( - "git clone --config core.autocrlf=false --recursive " & - &"{depthArg} {branchArg} {url} {downloadDir}") + depthArgs = if onlyTip: @["--depth", "1"] else: @[] + branchArgs = if branch == "": @[] else: @["-b", branch] + discard await tryDoCmdExAsync("git", concat(@["clone", "--recursive"], + depthArgs, branchArgs, @[url, downloadDir])) of DownloadMethod.hg: let - tipArg = if onlyTip: "-r tip " else: "" - branchArg = if branch == "": "" else: &"-b {branch}" - discard tryDoCmdEx(&"hg clone {tipArg} {branchArg} {url} {downloadDir}") + tipArgs = if onlyTip: @["-r", "tip"] else: @[] + branchArgs = if branch == "": @[] else: @["-b", branch] + discard await tryDoCmdExAsync("hg", + concat(@["clone"], tipArgs, branchArgs, @[url, downloadDir])) -proc getTagsList(dir: string, meth: DownloadMethod): seq[string] = +proc getTagsList(dir: string, meth: DownloadMethod): + Future[seq[string]] {.async.} = var output: string cd dir: case meth of DownloadMethod.git: - output = tryDoCmdEx("git tag") + output = await tryDoCmdExAsync("git", @["tag"]) of DownloadMethod.hg: - output = tryDoCmdEx("hg tags") + output = await tryDoCmdExAsync("hg", @["tags"]) if output.len > 0: case meth of DownloadMethod.git: @@ -72,11 +78,13 @@ proc getTagsList(dir: string, meth: DownloadMethod): seq[string] = else: result = @[] -proc getTagsListRemote*(url: string, meth: DownloadMethod): seq[string] = +proc getTagsListRemote*(url: string, meth: DownloadMethod): + Future[seq[string]] {.async.} = result = @[] case meth of DownloadMethod.git: - var (output, exitCode) = doCmdEx(&"git ls-remote --tags {url}") + var (output, exitCode) = await doCmdExAsync("git", + @["ls-remote", "--tags", url]) if exitCode != QuitSuccess: raise nimbleError("Unable to query remote tags for " & url & ". 
Git returned: " & output) @@ -139,22 +147,20 @@ proc isURL*(name: string): bool = proc cloneSpecificRevision(downloadMethod: DownloadMethod, url, downloadDir: string, - vcsRevision: Sha1Hash) = + vcsRevision: Sha1Hash) {.async.} = assert vcsRevision != notSetSha1Hash display("Cloning", "revision: " & $vcsRevision, priority = MediumPriority) case downloadMethod of DownloadMethod.git: let downloadDir = downloadDir.quoteShell createDir(downloadDir) - discard tryDoCmdEx(&"git -C {downloadDir} init") - discard tryDoCmdEx(&"git -C {downloadDir} config core.autocrlf false") - discard tryDoCmdEx(&"git -C {downloadDir} remote add origin {url}") - discard tryDoCmdEx( - &"git -C {downloadDir} fetch --depth 1 origin {vcsRevision}") - discard tryDoCmdEx(&"git -C {downloadDir} reset --hard FETCH_HEAD") - downloadDir.updateSubmodules + discard await tryDoCmdExAsync("git", @["-C", downloadDir, "init"]) + discard await tryDoCmdExAsync("git", @["-C", downloadDir, "remote", "add", "origin", url]) + discard await tryDoCmdExAsync("git", @["-C", downloadDir, "fetch", "--depth", "1", "origin", $vcsRevision]) + discard await tryDoCmdExAsync("git", @["-C", downloadDir, "reset", "--hard", "FETCH_HEAD"]) + await downloadDir.updateSubmodules of DownloadMethod.hg: - discard tryDoCmdEx(&"hg clone {url} -r {vcsRevision}") + discard await tryDoCmdExAsync("hg", @["clone", url, "-r", $vcsRevision]) proc getTarExePath: string = ## Returns path to `tar` executable. @@ -228,22 +234,33 @@ proc getGitHubApiUrl(url, commit: string): string = ## an URL for the GitHub REST API query for the full commit hash. &"https://api.github.com/repos/{extractOwnerAndRepo(url)}/commits/{commit}" -proc getUrlContent(url: string): string = +proc getUrlContent(url: string): Future[string] {.async.} = ## Makes a GET request to `url`. - let client = newHttpClient() - return client.getContent(url) + let + session = HttpSessionRef.new() + adress = session.getAddress(url).get + req = HttpClientRequestRef.get(session, adress) + res = await req.send() + status = res.status + + if status < 200 or status >= 300: # status is not 2xx (success) + raise nimbleError("Cannot get url content for '$1', returned status $2" % [url, $status]) + + let resBytes = await res.getBodyBytes() + result = string.fromBytes(resBytes) {.warning[ProveInit]: off.} -proc getFullRevisionFromGitHubApi(url, version: string): Sha1Hash = +proc getFullRevisionFromGitHubApi(url, version: string): + Future[Sha1HashRef] {.async.} = ## By given a commit short hash and an URL to a GitHub repository retrieves ## the full hash of the commit by using GitHub REST API. try: let gitHubApiUrl = getGitHubApiUrl(url, version) display("Get", gitHubApiUrl); - let content = getUrlContent(gitHubApiUrl) + let content = await getUrlContent(gitHubApiUrl) let json = parseJson(content) if json.hasKey("sha"): - return json["sha"].str.initSha1Hash + return json["sha"].str.initSha1Hash.newClone else: raise nimbleError(json["message"].str) except CatchableError as error: @@ -251,7 +268,7 @@ proc getFullRevisionFromGitHubApi(url, version: string): Sha1Hash = &"of package at \"{url}\".", details = error) {.warning[ProveInit]: on.} -proc parseRevision(lsRemoteOutput: string): Sha1Hash = +proc parseRevision(lsRemoteOutput: string): Sha1HashRef = ## Parses the output from `git ls-remote` call to extract the returned sha1 ## hash value. Even when successful the first line of the command's output ## can be a redirection warning. 
@@ -259,42 +276,43 @@ proc parseRevision(lsRemoteOutput: string): Sha1Hash = for line in lines: if line.len >= 40: try: - return line[0..39].initSha1Hash + return line[0..39].initSha1Hash.newClone except InvalidSha1HashError: discard - return notSetSha1Hash + return notSetSha1Hash.newClone -proc getRevision(url, version: string): Sha1Hash = +proc getRevision(url, version: string): Future[Sha1HashRef] {.async.} = ## Returns the commit hash corresponding to the given `version` of the package ## in repository at `url`. - let output = tryDoCmdEx(&"git ls-remote {url} {version}") + let output = await tryDoCmdExAsync("git", @["ls-remote", url, $version]) result = parseRevision(output) - if result == notSetSha1Hash: + if result[] == notSetSha1Hash: if version.seemsLikeRevision: - result = getFullRevisionFromGitHubApi(url, version) + result = await getFullRevisionFromGitHubApi(url, version) else: raise nimbleError(&"Cannot get revision for version \"{version}\" " & &"of package at \"{url}\".") -proc getTarCmdLine(downloadDir, filePath: string): string = +proc getTarCmdLine(downloadDir, filePath: string): + tuple[cmd: string, args: seq[string]] = ## Returns an OS specific command and arguments for extracting the downloaded ## tarball. when defined(Windows): let downloadDir = downloadDir.replace('\\', '/') let filePath = filePath.replace('\\', '/') - &"{getTarExePath()} -C {downloadDir} -xf {filePath} --strip-components 1 " & - "--force-local" + (getTarExePath(), @["-C", downloadDir, "-xf", filePath, + "--strip-components", "1", "--force-local"]) else: - &"tar -C {downloadDir} -xf {filePath} --strip-components 1" + ("tar", @["-C", downloadDir, "-xf", filePath, "--strip-components", "1"]) proc doDownloadTarball(url, downloadDir, version: string, queryRevision: bool): - Sha1Hash = + Future[Sha1HashRef] {.async.} = ## Downloads package tarball from GitHub. Returns the commit hash of the ## downloaded package in the case `queryRevision` is `true`. let downloadLink = getTarballDownloadLink(url, version) display("Downloading", downloadLink) - let data = getUrlContent(downloadLink) + let data = await getUrlContent(downloadLink) display("Completed", "downloading " & downloadLink) let filePath = downloadDir / "tarball.tar.gz" @@ -304,8 +322,8 @@ proc doDownloadTarball(url, downloadDir, version: string, queryRevision: bool): display("Completed", "saving " & filePath) display("Unpacking", filePath) - let cmd = getTarCmdLine(downloadDir, filePath) - let (output, exitCode) = doCmdEx(cmd) + let (cmd, args) = getTarCmdLine(downloadDir, filePath) + let (output, exitCode) = await doCmdExAsync(cmd, args) if exitCode != QuitSuccess and not output.contains("Cannot create symlink to"): # If the command fails for reason different then unable establishing a # sym-link raise an exception. 
This reason for failure is common on Windows @@ -335,13 +353,14 @@ proc doDownloadTarball(url, downloadDir, version: string, queryRevision: bool): writeFile(downloadDir / linkName, linkPath) filePath.removeFile - return if queryRevision: getRevision(url, version) else: notSetSha1Hash + return if queryRevision: await getRevision(url, version) + else: notSetSha1Hash.newClone {.warning[ProveInit]: off.} -proc doDownload(url, downloadDir: string, verRange: VersionRange, +proc doDownload(url: string, downloadDir: string, verRange: VersionRange, downMethod: DownloadMethod, options: Options, vcsRevision: Sha1Hash): - tuple[version: Version, vcsRevision: Sha1Hash] = + Future[tuple[version: Version, vcsRevision: Sha1HashRef]] {.async.} = ## Downloads the repository specified by ``url`` using the specified download ## method. ## @@ -359,37 +378,38 @@ proc doDownload(url, downloadDir: string, verRange: VersionRange, if $latest.ver != "": result.version = latest.ver - result.vcsRevision = notSetSha1Hash + result.vcsRevision = notSetSha1Hash.newClone removeDir(downloadDir) if vcsRevision != notSetSha1Hash: if downloadTarball(url, options): - discard doDownloadTarball(url, downloadDir, $vcsRevision, false) + discard await doDownloadTarball(url, downloadDir, $vcsRevision, false) else: - cloneSpecificRevision(downMethod, url, downloadDir, vcsRevision) - result.vcsRevision = vcsRevision + await cloneSpecificRevision(downMethod, url, downloadDir, vcsRevision) + result.vcsRevision = vcsRevision.newClone elif verRange.kind == verSpecial: # We want a specific commit/branch/tag here. if verRange.spe == getHeadName(downMethod): # Grab HEAD. if downloadTarball(url, options): - result.vcsRevision = doDownloadTarball(url, downloadDir, "HEAD", true) + result.vcsRevision = await doDownloadTarball( + url, downloadDir, "HEAD", true) else: - doClone(downMethod, url, downloadDir, - onlyTip = not options.forceFullClone) + await doClone(downMethod, url, downloadDir, + onlyTip = not options.forceFullClone) else: assert ($verRange.spe)[0] == '#', "The special version must start with '#'." let specialVersion = substr($verRange.spe, 1) if downloadTarball(url, options): - result.vcsRevision = doDownloadTarball( + result.vcsRevision = await doDownloadTarball( url, downloadDir, specialVersion, true) else: # Grab the full repo. - doClone(downMethod, url, downloadDir, onlyTip = false) + await doClone(downMethod, url, downloadDir, onlyTip = false) # Then perform a checkout operation to get the specified branch/commit. # `spe` starts with '#', trim it. - doCheckout(downMethod, downloadDir, specialVersion) + await doCheckout(downMethod, downloadDir, specialVersion) result.version = verRange.spe else: case downMethod @@ -397,46 +417,48 @@ proc doDownload(url, downloadDir: string, verRange: VersionRange, # For Git we have to query the repo remotely for its tags. This is # necessary as cloning with a --depth of 1 removes all tag info. 
result.version = getHeadName(downMethod) - let versions = getTagsListRemote(url, downMethod).getVersionList() + let versions = (await getTagsListRemote(url, downMethod)).getVersionList() if versions.len > 0: getLatestByTag: if downloadTarball(url, options): let versionToDownload = if latest.tag.len > 0: latest.tag else: "HEAD" - result.vcsRevision = doDownloadTarball( + result.vcsRevision = await doDownloadTarball( url, downloadDir, versionToDownload, true) else: display("Cloning", "latest tagged version: " & latest.tag, priority = MediumPriority) - doClone(downMethod, url, downloadDir, latest.tag, - onlyTip = not options.forceFullClone) + await doClone(downMethod, url, downloadDir, latest.tag, + onlyTip = not options.forceFullClone) else: - display("Warning:", "The package has no tagged releases, downloading HEAD instead.", Warning, + display("Warning:", "The package has no tagged releases, downloading HEAD instead.", Warning, priority = HighPriority) if downloadTarball(url, options): - result.vcsRevision = doDownloadTarball(url, downloadDir, "HEAD", true) + result.vcsRevision = await doDownloadTarball( + url, downloadDir, "HEAD", true) else: # If no commits have been tagged on the repo we just clone HEAD. - doClone(downMethod, url, downloadDir) # Grab HEAD. + await doClone(downMethod, url, downloadDir) # Grab HEAD. of DownloadMethod.hg: - doClone(downMethod, url, downloadDir, - onlyTip = not options.forceFullClone) + await doClone(downMethod, url, downloadDir, + onlyTip = not options.forceFullClone) result.version = getHeadName(downMethod) - let versions = getTagsList(downloadDir, downMethod).getVersionList() + let versions = + (await getTagsList(downloadDir, downMethod)).getVersionList() if versions.len > 0: getLatestByTag: display("Switching", "to latest tagged version: " & latest.tag, priority = MediumPriority) - doCheckout(downMethod, downloadDir, latest.tag) + await doCheckout(downMethod, downloadDir, latest.tag) else: - display("Warning:", "The package has no tagged releases, downloading HEAD instead.", Warning, + display("Warning:", "The package has no tagged releases, downloading HEAD instead.", Warning, priority = HighPriority) - if result.vcsRevision == notSetSha1Hash: + if result.vcsRevision[] == notSetSha1Hash: # In the case the package in not downloaded as tarball we must query its # VCS revision from its download directory. - result.vcsRevision = downloadDir.getVcsRevision + result.vcsRevision = downloadDir.getVcsRevision.newClone {.warning[ProveInit]: on.} proc downloadPkg*(url: string, verRange: VersionRange, @@ -444,7 +466,7 @@ proc downloadPkg*(url: string, verRange: VersionRange, subdir: string, options: Options, downloadPath: string, - vcsRevision: Sha1Hash): DownloadPkgResult = + vcsRevision: Sha1Hash): Future[DownloadPkgResult] {.async.} = ## Downloads the repository as specified by ``url`` and ``verRange`` using ## the download method specified. 
## @@ -490,8 +512,17 @@ proc downloadPkg*(url: string, verRange: VersionRange, priority = HighPriority) result.dir = downloadDir / subdir - (result.version, result.vcsRevision) = doDownload( - modUrl, downloadDir, verRange, downMethod, options, vcsRevision) + + var downloadsInProgress {.global.}: Table[tuple[modUrl: string, downloadDir: string], + Future[tuple[version: Version, vcsRevision: Sha1HashRef]]] = + initTable[tuple[modUrl: string, downloadDir: string], Future[tuple[version: Version, vcsRevision: Sha1HashRef]]]() + + let key = (modUrl, downloadDir) + if not downloadsInProgress.hasKey key: + downloadsInProgress[key] = + doDownload(modUrl, downloadDir, verRange, downMethod, options, vcsRevision) + + (result.version, result.vcsRevision) = await downloadsInProgress[key] if verRange.kind != verSpecial: ## Makes sure that the downloaded package's version satisfies the requested @@ -508,7 +539,8 @@ proc echoPackageVersions*(pkg: Package) = case downMethod of DownloadMethod.git: try: - let versions = getTagsListRemote(pkg.url, downMethod).getVersionList() + let versions = + (waitFor getTagsListRemote(pkg.url, downMethod)).getVersionList() if versions.len > 0: let sortedVersions = toSeq(values(versions)) echo(" versions: " & join(sortedVersions, ", ")) diff --git a/src/nimblepkg/lockfile.nim b/src/nimblepkg/lockfile.nim index 7e871c908..a80b80336 100644 --- a/src/nimblepkg/lockfile.nim +++ b/src/nimblepkg/lockfile.nim @@ -14,7 +14,7 @@ const lockFileName* = "nimble.lock" lockFileVersion = 1 -proc initLockFileDep*: LockFileDep = +proc initLockFileDep(): LockFileDep = result = LockFileDep( version: notSetVersion, vcsRevision: notSetSha1Hash, diff --git a/src/nimblepkg/options.nim b/src/nimblepkg/options.nim index 49b8e0541..3db6c484a 100644 --- a/src/nimblepkg/options.nim +++ b/src/nimblepkg/options.nim @@ -47,6 +47,8 @@ type # For which package in the dependency tree the command should be executed. # If not provided by default it applies to the current directory package. # For now, it is used only by the run action and it is ignored by others. + maxParallelDownloads*: int # This is the maximum number of parallel + # downloads. 0 means no limit. ActionType* = enum actionNil, actionRefresh, actionInit, actionDump, actionPublish, @@ -208,6 +210,8 @@ Nimble Options: action and it is ignored by others. -t, --tarballs Enable downloading of packages as tarballs when working with GitHub repositories. + -m, --max-parallel-downloads The maximum number of parallel downloads. + The default value is 20. Use 0 for no limit. --ver Query remote server for package version information when searching or listing packages. --nimbleDir:dirname Set the Nimble directory. 
@@ -505,6 +509,10 @@ proc parseFlag*(flag, val: string, result: var Options, kind = cmdLongOption) = of "nosslcheck": result.disableSslCertCheck = true of "tarballs", "t": result.enableTarballs = true of "package", "p": result.package = val + of "max-parallel-downloads", "m": + result.maxParallelDownloads = parseInt(val) + if result.maxParallelDownloads == 0: + result.maxParallelDownloads = int.high else: isGlobalFlag = false var wasFlagHandled = true @@ -612,6 +620,7 @@ proc initOptions*(): Options = verbosity: HighPriority, noColor: not isatty(stdout), startDir: getCurrentDir(), + maxParallelDownloads: 20, ) proc handleUnknownFlags(options: var Options) = diff --git a/src/nimblepkg/sha1hashes.nim b/src/nimblepkg/sha1hashes.nim index 5c12cdb6f..497052fbe 100644 --- a/src/nimblepkg/sha1hashes.nim +++ b/src/nimblepkg/sha1hashes.nim @@ -13,6 +13,8 @@ type ## procedure which validates the input. hashValue: string + Sha1HashRef* = ref Sha1Hash + const notSetSha1Hash* = Sha1Hash(hashValue: "") diff --git a/src/nimblepkg/tools.nim b/src/nimblepkg/tools.nim index 29b7027b8..cd215f493 100644 --- a/src/nimblepkg/tools.nim +++ b/src/nimblepkg/tools.nim @@ -3,10 +3,10 @@ # # Various miscellaneous utility functions reside here. import osproc, pegs, strutils, os, uri, sets, json, parseutils, strformat, - sequtils + sequtils, chronos from net import SslCVerifyMode, newContext, SslContext - +import chronos/asyncproc import version, cli, common, packageinfotypes, options, sha1hashes from compiler/nimblecmd import getPathVersionChecksum @@ -52,6 +52,23 @@ proc doCmdEx*(cmd: string): ProcessOutput = raise nimbleError("'" & bin & "' not in PATH.") return execCmdEx(cmd) +proc removeQuotes(cmd: string): string = + cmd.filterIt(it != '"').join + +proc doCmdExAsync*(cmd: string, args: seq[string] = @[]): + Future[ProcessOutput] {.async.} = + display("Executing", join(concat(@[cmd], args), " ")) + let bin = extractBin(cmd) + if findExe(bin) == "": + raise nimbleError("'" & bin & "' not in PATH.") + + let res = await execCommandEx( + command = cmd.removeQuotes, + arguments = args, + options = {UsePath}) + + return (res.stdOutput, res.status) + proc tryDoCmdExErrorMessage*(cmd, output: string, exitCode: int): string = &"Execution of '{cmd}' failed with an exit code {exitCode}.\n" & &"Details: {output}" @@ -62,6 +79,13 @@ proc tryDoCmdEx*(cmd: string): string {.discardable.} = raise nimbleError(tryDoCmdExErrorMessage(cmd, output, exitCode)) return output +proc tryDoCmdExAsync*(cmd: string, args: seq[string] = @[]): + Future[string] {.async.} = + let (output, exitCode) = await doCmdExAsync(cmd, args) + if exitCode != QuitSuccess: + raise nimbleError(tryDoCmdExErrorMessage(cmd & " " & args.join(" "), output, exitCode)) + return output + proc getNimBin*: string = result = "nim" if findExe("nim") != "": result = findExe("nim") @@ -139,7 +163,7 @@ proc getDownloadDirName*(uri: string, verRange: VersionRange, if verSimple != "": result.add "_" result.add verSimple - + if vcsRevision != notSetSha1Hash: result.add "_" result.add $vcsRevision @@ -197,7 +221,7 @@ proc getNameVersionChecksum*(pkgpath: string): PackageBasicInfo = return getNameVersionChecksum(pkgPath.splitPath.head) let (name, version, checksum) = getPathVersionChecksum(pkgPath.splitPath.tail) - let sha1Checksum = + let sha1Checksum = try: initSha1Hash(checksum) except InvalidSha1HashError: diff --git a/tests/config.nims b/tests/config.nims index 6dc509fc0..893035c94 100644 --- a/tests/config.nims +++ b/tests/config.nims @@ -2,3 +2,5 @@ import os let buildDir = 
   currentSourcePath().parentDir.parentDir / "buildTests"
 switch("outdir", buildDir)
+when fileExists("tester.paths"):
+  include "tester.paths"
diff --git a/tests/nim.cfg b/tests/nim.cfg
index 1887b8b09..9a89272cc 100644
--- a/tests/nim.cfg
+++ b/tests/nim.cfg
@@ -1,2 +1,6 @@
 --path:"$nim/"
 --path:"../src/"
+--path:"../nim-chronos/"
+--path:"../nim-stew/"
+--path:"../nim-bearssl/"
+--threads:off
diff --git a/tests/testscommon.nim b/tests/testscommon.nim
index ee0317918..29279275e 100644
--- a/tests/testscommon.nim
+++ b/tests/testscommon.nim
@@ -195,4 +195,4 @@ proc writeDevelopFile*(path: string, includes: seq[string],
 putEnv("NIMBLE_TEST_BINARY_PATH", nimblePath)
 
 # Always recompile.
-doAssert execCmdEx("nim c " & nimbleCompilePath).exitCode == QuitSuccess
+doAssert execCmdEx("nim c -d:release " & nimbleCompilePath).exitCode == QuitSuccess
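
Note on the parallelism pattern (illustration only, not part of the diff above): the machinery added in src/nimble.nim (`DownloadQueue`, `startDownloadWorker`, `lockedDepsDownload`) amounts to a bounded worker pool on top of chronos — a shared queue is drained by at most `maxParallelDownloads` workers, each awaiting one download at a time. The following self-contained sketch shows that pattern; `Job`, `fakeDownload`, `downloadAll` and the package names are invented for the example and are not Nimble's actual code.

import chronos

type
  Job = object
    name: string
  JobQueue = ref seq[Job]          # shared queue, like `DownloadQueue` above

proc fakeDownload(job: Job) {.async.} =
  # Stand-in for the real `downloadDependency`: sleep instead of cloning.
  echo "start  ", job.name
  await sleepAsync(100.milliseconds)
  echo "finish ", job.name

proc worker(queue: JobQueue) {.async.} =
  # Like `startDownloadWorker`: keep popping jobs until the queue is empty,
  # so each worker runs at most one download at a time.
  while queue[].len > 0:
    let job = queue[].pop()
    await fakeDownload(job)

proc downloadAll(names: seq[string], maxParallel: int) =
  var queue: JobQueue
  queue.new()
  for n in names:
    queue[].add Job(name: n)
  var workers: seq[Future[void]]
  for _ in 0 ..< min(maxParallel, queue[].len):
    workers.add worker(queue)
  waitFor allFutures(workers)      # wait for every worker to drain the queue

when isMainModule:
  downloadAll(@["chronos", "stew", "bearssl", "zlib"], maxParallel = 2)

The worker count corresponds to the new `--max-parallel-downloads` (`-m`) option added in options.nim, which defaults to 20; per the patch, passing 0 removes the limit.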
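
A second detail worth calling out is the de-duplication added to `downloadPkg` in download.nim: a table keyed by (URL, download directory) stores the in-flight future, so two resolutions that hit the same package await a single download instead of cloning twice. A minimal sketch of that idea follows; `fetch`, `cachedFetch` and the returned path are made up for illustration.

import chronos, std/tables

var inFlight: Table[string, Future[string]]

proc fetch(url: string): Future[string] {.async.} =
  # Stand-in for the real download; pretend the result is a directory path.
  await sleepAsync(50.milliseconds)
  return "/tmp/cache/" & url

proc cachedFetch(url: string): Future[string] {.async.} =
  if url notin inFlight:
    inFlight[url] = fetch(url)     # start the download only once per URL
  return await inFlight[url]       # every caller awaits the same future

when isMainModule:
  # Two concurrent requests for the same package trigger a single download.
  let a = cachedFetch("nim-chronos")
  let b = cachedFetch("nim-chronos")
  waitFor allFutures(a, b)
  doAssert a.read() == b.read()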