From 4c8d147c92bada8d535931d64aa297997361a445 Mon Sep 17 00:00:00 2001 From: David Dias Date: Wed, 6 Dec 2017 08:24:22 +0000 Subject: [PATCH 001/102] Initial commit --- .gitignore | 59 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ LICENSE | 21 +++++++++++++++++++ README.md | 1 + 3 files changed, 81 insertions(+) create mode 100644 .gitignore create mode 100644 LICENSE create mode 100644 README.md diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000..00cbbdf53f --- /dev/null +++ b/.gitignore @@ -0,0 +1,59 @@ +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Bower dependency directory (https://bower.io/) +bower_components + +# node-waf configuration +.lock-wscript + +# Compiled binary addons (http://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules/ +jspm_packages/ + +# Typescript v1 declaration files +typings/ + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# dotenv environment variables file +.env + diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000..bbfffbf92d --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 libp2p + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/README.md b/README.md new file mode 100644 index 0000000000..b45d833362 --- /dev/null +++ b/README.md @@ -0,0 +1 @@ +# js-libp2p-keychain \ No newline at end of file From 49e6c47c40924ad1d0b45be755f861547ca804aa Mon Sep 17 00:00:00 2001 From: David Dias Date: Wed, 6 Dec 2017 08:49:16 +0000 Subject: [PATCH 002/102] chore: setup repo --- .gitignore | 54 ++++++++++++++++------------------------------ .travis.yml | 32 +++++++++++++++++++++++++++ README.md | 34 ++++++++++++++++++++++++++++- appveyor.yml | 26 ++++++++++++++++++++++ circle.yml | 18 ++++++++++++++++ package.json | 44 +++++++++++++++++++++++++++++++++++++ src/index.js | 1 + test/index.spec.js | 4 ++++ 8 files changed, 177 insertions(+), 36 deletions(-) create mode 100644 .travis.yml create mode 100644 appveyor.yml create mode 100644 circle.yml create mode 100644 package.json create mode 100644 src/index.js create mode 100644 test/index.spec.js diff --git a/.gitignore b/.gitignore index 00cbbdf53f..1c73b3783a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,15 +1,19 @@ +docs +**/node_modules/ +**/*.log +test/repo-tests* +**/bundle.js + # Logs logs *.log -npm-debug.log* -yarn-debug.log* -yarn-error.log* + +coverage # Runtime data pids *.pid *.seed -*.pid.lock # Directory for instrumented libs generated by jscoverage/JSCover lib-cov @@ -17,43 +21,23 @@ lib-cov # Coverage directory used by tools like istanbul coverage -# nyc test coverage -.nyc_output - # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) .grunt -# Bower dependency directory (https://bower.io/) -bower_components - # node-waf configuration .lock-wscript -# Compiled binary addons (http://nodejs.org/api/addons.html) -build/Release - -# Dependency directories -node_modules/ -jspm_packages/ - -# Typescript v1 declaration files -typings/ - -# Optional npm cache directory -.npm - -# Optional eslint cache -.eslintcache - -# Optional REPL history -.node_repl_history - -# Output of 'npm pack' -*.tgz +build -# Yarn Integrity file -.yarn-integrity +# Dependency directory +# https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git +node_modules -# dotenv environment variables file -.env +lib +dist +test/test-data/go-ipfs-repo/LOCK +test/test-data/go-ipfs-repo/LOG +test/test-data/go-ipfs-repo/LOG.old +# while testing npm5 +package-lock.json diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000000..584f308f81 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,32 @@ +sudo: false +language: node_js + +matrix: + include: + - node_js: 6 + env: CXX=g++-4.8 + - node_js: 8 + env: CXX=g++-4.8 + # - node_js: stable + # env: CXX=g++-4.8 + +script: + - npm run lint + - npm run test + - npm run coverage + - make test + +before_script: + - export DISPLAY=:99.0 + - sh -e /etc/init.d/xvfb start + +after_success: + - npm run coverage-publish + +addons: + firefox: 'latest' + apt: + sources: + - ubuntu-toolchain-r-test + packages: + - g++-4.8 diff --git a/README.md b/README.md index b45d833362..95658501b8 100644 --- a/README.md +++ b/README.md @@ -1 +1,33 @@ -# js-libp2p-keychain \ No newline at end of file +# js-libp2p-keychain + +[![](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://ipn.io) +[![](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](http://ipfs.io/) +[![](https://img.shields.io/badge/freenode-%23ipfs-blue.svg?style=flat-square)](http://webchat.freenode.net/?channels=%23ipfs) +[![standard-readme 
compliant](https://img.shields.io/badge/standard--readme-OK-green.svg?style=flat-square)](https://github.com/RichardLitt/standard-readme) +[![Coverage Status](https://coveralls.io/repos/github/libp2p/js-libp2p-keychain/badge.svg?branch=master)](https://coveralls.io/github/libp2p/js-libp2p-keychain?branch=master) +[![Travis CI](https://travis-ci.org/libp2p/js-libp2p-keychain.svg?branch=master)](https://travis-ci.org/libp2p/js-libp2p-keychain) +[![Circle CI](https://circleci.com/gh/libp2p/js-libp2p-keychain.svg?style=svg)](https://circleci.com/gh/libp2p/js-libp2p-keychain) +[![Dependency Status](https://david-dm.org/libp2p/js-libp2p-keychain.svg?style=flat-square)](https://david-dm.org/libp2p/js-libp2p-keychain) +[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat-square)](https://github.com/feross/standard) +![](https://img.shields.io/badge/npm-%3E%3D3.0.0-orange.svg?style=flat-square) +![](https://img.shields.io/badge/Node.js-%3E%3D6.0.0-orange.svg?style=flat-square) + +> Keychain primitives for libp2p in JavaScript + +## Table of Contents + +## Install + +## API + +## Contribute + +Feel free to join in. All welcome. Open an [issue](https://github.com/libp2p/js-libp2p-crypto/issues)! + +This repository falls under the IPFS [Code of Conduct](https://github.com/ipfs/community/blob/master/code-of-conduct.md). + +[![](https://cdn.rawgit.com/jbenet/contribute-ipfs-gif/master/img/contribute.gif)](https://github.com/ipfs/community/blob/master/contributing.md) + +## License + +[MIT](LICENSE) diff --git a/appveyor.yml b/appveyor.yml new file mode 100644 index 0000000000..ba93339ba8 --- /dev/null +++ b/appveyor.yml @@ -0,0 +1,26 @@ +environment: + matrix: + - nodejs_version: "6" + - nodejs_version: "8" + +init: + - git config --global core.autocrlf input + +# cache: +# - node_modules + +platform: + - x64 + +install: + - ps: Install-Product node $env:nodejs_version $env:platform + - node --version + - npm --version + - npm install + +test_script: + - npm test + +build: off + +version: "{build}" diff --git a/circle.yml b/circle.yml new file mode 100644 index 0000000000..d67b6ae70b --- /dev/null +++ b/circle.yml @@ -0,0 +1,18 @@ +machine: + node: + version: stable + +test: + post: + - npm run coverage -- --upload + +dependencies: + pre: + - google-chrome --version + - curl -L -o google-chrome.deb https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb + - sudo dpkg -i google-chrome.deb || true + - sudo apt-get update + - sudo apt-get install -f + - sudo apt-get install --only-upgrade lsb-base + - sudo dpkg -i google-chrome.deb + - google-chrome --version diff --git a/package.json b/package.json new file mode 100644 index 0000000000..35ffcf5df8 --- /dev/null +++ b/package.json @@ -0,0 +1,44 @@ +{ + "name": "libp2p-keychain", + "version": "0.0.0", + "description": "", + "main": "src/index.js", + "scripts": { + "lint": "aegir lint", + "build": "aegir build", + "test": "aegir test", + "test:node": "aegir test -t node", + "test:browser": "aegir test -t browser -t webworker", + "release": "aegir release", + "release-minor": "aegir release --type minor", + "release-major": "aegir release --type major" + }, + "pre-commit": [ + "lint", + "test" + ], + "engines": { + "node": ">=6.0.0", + "npm": ">=3.0.0" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/libp2p/js-libp2p-keychain.git" + }, + "keywords": [ + "IPFS", + "libp2p", + "keys", + "crypto" + ], + "author": "David Dias ", + "license": "MIT", + "bugs": { + "url": 
"https://github.com/libp2p/js-libp2p-keychain/issues" + }, + "homepage": "https://github.com/libp2p/js-libp2p-keychain#readme", + "devDependencies": { + "aegir": "^12.2.0", + "pre-commit": "^1.2.2" + } +} diff --git a/src/index.js b/src/index.js new file mode 100644 index 0000000000..ccacec309b --- /dev/null +++ b/src/index.js @@ -0,0 +1 @@ +'use strict' diff --git a/test/index.spec.js b/test/index.spec.js new file mode 100644 index 0000000000..c638cf8684 --- /dev/null +++ b/test/index.spec.js @@ -0,0 +1,4 @@ +/* eslint-env mocha */ +'use strict' + +it('so much testing', () => {}) From 1a96ae8cb73396ec7fb64edb9897587697bbe026 Mon Sep 17 00:00:00 2001 From: Richard Schneider Date: Wed, 6 Dec 2017 22:56:09 +1300 Subject: [PATCH 003/102] feat: move bits from https://github.com/richardschneider/ipfs-encryption --- .gitattributes | 2 + README.md | 79 ++++++++- doc/private-key.png | Bin 0 -> 25518 bytes doc/private-key.xml | 1 + package.json | 29 +++- src/cms.js | 97 +++++++++++ src/index.js | 2 + src/keychain.js | 362 ++++++++++++++++++++++++++++++++++++++++++ src/util.js | 86 ++++++++++ test/browser.js | 30 ++++ test/index.spec.js | 4 - test/keychain.spec.js | 356 +++++++++++++++++++++++++++++++++++++++++ test/node.js | 34 ++++ test/peerid.js | 105 ++++++++++++ 14 files changed, 1178 insertions(+), 9 deletions(-) create mode 100644 .gitattributes create mode 100644 doc/private-key.png create mode 100644 doc/private-key.xml create mode 100644 src/cms.js create mode 100644 src/keychain.js create mode 100644 src/util.js create mode 100644 test/browser.js delete mode 100644 test/index.spec.js create mode 100644 test/keychain.spec.js create mode 100644 test/node.js create mode 100644 test/peerid.js diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000000..ef41d4faa3 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,2 @@ +*.png binary +* crlf=input diff --git a/README.md b/README.md index 95658501b8..0f89dcdb77 100644 --- a/README.md +++ b/README.md @@ -12,14 +12,91 @@ ![](https://img.shields.io/badge/npm-%3E%3D3.0.0-orange.svg?style=flat-square) ![](https://img.shields.io/badge/Node.js-%3E%3D6.0.0-orange.svg?style=flat-square) -> Keychain primitives for libp2p in JavaScript +> A secure key chain for libp2p in JavaScript + +## Features + +- Manages the lifecycle of a key +- Keys are encrypted at rest +- Enforces the use of safe key names +- Uses encrypted PKCS 8 for key storage +- Uses PBKDF2 for a "stetched" key encryption key +- Enforces NIST SP 800-131A and NIST SP 800-132 +- Uses PKCS 7: CMS (aka RFC 5652) to provide cryptographically protected messages +- Delays reporting errors to slow down brute force attacks ## Table of Contents ## Install +### Usage + + const datastore = new FsStore('./a-keystore') + const opts = { + passPhrase: 'some long easily remembered phrase' + } + const keychain = new Keychain(datastore, opts) + ## API +Managing a key + +- `createKey (name, type, size, callback)` +- `renameKey (oldName, newName, callback)` +- `removeKey (name, callback)` +- `exportKey (name, password, callback)` +- `importKey (name, pem, password, callback)` +- `importPeer (name, peer, callback)` + +A naming service for a key + +- `listKeys (callback)` +- `findKeyById (id, callback)` +- `findKeyByName (name, callback)` + +Cryptographically protected messages + +- `cms.createAnonymousEncryptedData (name, plain, callback)` +- `cms.readData (cmsData, callback)` + +### KeyInfo + +The key management and naming service API all return a `KeyInfo` object. 
The `id` is a universally unique identifier for the key. The `name` is local to the key chain. + +``` +{ + name: 'rsa-key', + id: 'QmYWYSUZ4PV6MRFYpdtEDJBiGs4UrmE6g8wmAWSePekXVW' +} +``` + +The **key id** is the SHA-256 [multihash](https://github.com/multiformats/multihash) of its public key. The *public key* is a [protobuf encoding](https://github.com/libp2p/js-libp2p-crypto/blob/master/src/keys/keys.proto.js) containing a type and the [DER encoding](https://en.wikipedia.org/wiki/X.690) of the PKCS [SubjectPublicKeyInfo](https://www.ietf.org/rfc/rfc3279.txt). + +### Private key storage + +A private key is stored as an encrypted PKCS 8 structure in the PEM format. It is protected by a key generated from the key chain's *passPhrase* using **PBKDF2**. Its file extension is `.p8`. + +The default options for generating the derived encryption key are in the `dek` object: +``` +const defaultOptions = { + createIfNeeded: true, + + //See https://cryptosense.com/parameter-choice-for-pbkdf2/ + dek: { + keyLength: 512 / 8, + iterationCount: 10000, + salt: 'you should override this value with a crypto secure random number', + hash: 'sha512' + } +} +``` + +![key storage](../doc/private-key.png?raw=true) + +### Physical storage + +The actual physical storage of an encrypted key is left to implementations of [interface-datastore](https://github.com/ipfs/interface-datastore/). A key benefit is that the key chain can now be used in the browser with the [js-datastore-level](https://github.com/ipfs/js-datastore-level) implementation. + ## Contribute Feel free to join in. All welcome. Open an [issue](https://github.com/libp2p/js-libp2p-crypto/issues)! diff --git a/doc/private-key.png b/doc/private-key.png new file mode 100644 index 0000000000000000000000000000000000000000..4c85dc610c883942212ff3b419cad3cae3c24769 GIT binary patch literal 25518
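The README's Usage and API sections above list the calls but never string them together. The following is a minimal end-to-end sketch, not part of the patches themselves, assuming the module is installed under its package.json name `libp2p-keychain`; the store path, key name, and message text are illustrative values, and the callback style matches test/keychain.spec.js later in this series.

```
// A sketch only: create a keychain over a datastore, make an RSA key, then
// protect and read back a message with the CMS service. Paths and names are illustrative.
const FsStore = require('datastore-fs')        // any interface-datastore implementation works
const Keychain = require('libp2p-keychain')

const store = new FsStore('./a-keystore')      // keys are persisted here as encrypted PEM (.p8)
store.open((err) => {
  if (err) throw err

  const keychain = new Keychain(store, {
    passPhrase: 'some long easily remembered phrase' // must be at least 20 characters
  })

  // Create a named 2048-bit RSA key; the callback receives a KeyInfo { name, id }
  keychain.createKey('example-key', 'rsa', 2048, (err, info) => {
    if (err) throw err

    // Wrap a message for that key in a CMS (PKCS #7) enveloped structure...
    const plain = Buffer.from('only the key holder can read this')
    keychain.cms.createAnonymousEncryptedData(info.name, plain, (err, cmsData) => {
      if (err) throw err

      // ...and read it back with the same key chain
      keychain.cms.readData(cmsData, (err, decrypted) => {
        if (err) throw err
        console.log(decrypted.toString())      // 'only the key holder can read this'
      })
    })
  })
})
```

Swapping `FsStore` for a `datastore-level` store is what the browser tests (test/browser.js) rely on; the keychain code itself is unchanged.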
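For the "Private key storage" section, the sketch below shows, under the same illustrative pass phrase and a placeholder salt, how the pass phrase is stretched into the key-encryption key; it mirrors the `forge.pkcs5.pbkdf2` call that src/keychain.js, added later in this patch, performs with the `dek` options shown above.

```
// Sketch of the PBKDF2 "stretching" described above, mirroring src/keychain.js.
// The salt is a placeholder; a real deployment must use a crypto-secure random value.
const forge = require('node-forge')

const passPhrase = 'some long easily remembered phrase'
const dek = {
  keyLength: 512 / 8,      // 64 bytes, above the NIST SP 800-132 minimum of 14 bytes
  iterationCount: 10000,   // NIST minimum is 1000
  salt: 'replace this with at least 16 random bytes',
  hash: 'sha512'
}

// Derive the key-encryption key and keep it as hex, as the keychain does; this
// value is what encrypts each private key (PKCS #8 PEM) before it hits the datastore.
const raw = forge.pkcs5.pbkdf2(passPhrase, dek.salt, dek.iterationCount, dek.keyLength, dek.hash)
const kek = forge.util.bytesToHex(raw)
console.log(kek.length) // 128 hex characters for a 64-byte key
```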
zRm4}rb~dgp)+LF<06awRxv5$R<{BEvtZ{fPVxz^1^i#BoQ8ccE)Ff^teB3X4rkI_n zsh5^@X}$hE2`=V=+h{{Kva8J2rdzCF4j#00OUz%#9YFiROVak(OXz)Qy#%Hi#B@m< zbaShIl=0WCBL2ZiH|O;ZtBwWq8F<~}W(3MqBbNlzh1ig{;J;zg#lwH&l!dsb{&Ez& zjv}3172-bb3z>WO-*NiLh+RD-;uWr=;Chi0yNYUr%#!|hoCe5~K}r5nLSiBCGaOi0 NO7iM*Wil3F{{{GIyypM_ literal 0 HcmV?d00001 diff --git a/doc/private-key.xml b/doc/private-key.xml new file mode 100644 index 0000000000..51cb8c5a9b --- /dev/null +++ b/doc/private-key.xml @@ -0,0 +1 @@ +7VlNb6MwEP01HLfCGBJ6bNJ2V9pdqVIP2x4dcMAKYGScJumvXxNsvkw+SmgSVe2hMs9mbL839swQA07j9U+G0vAv9XFkWKa/NuC9YVmua4n/ObApAOjCAggY8QsIVMAzeccSNCW6JD7OGgM5pREnaRP0aJJgjzcwxBhdNYfNadScNUUB1oBnD0U6+o/4PJTbssYV/guTIFQzg9Ft0TND3iJgdJnI+QwLzrd/RXeMlC250SxEPl3VIPhgwCmjlBeteD3FUU6toq1473FHb7luhhN+zAtSpzcULeXWU5RluYmQoQzLRfKNIobjtbA7CXkcCQCIZsYZXeApjSgTSEITMXIyJ1HUglBEgkQ8emJlWOCTN8w4EZTfyY6Y+H4+zWQVEo6fU+Tlc66EfwlsSynOF22KJ7loYQCvd24clHQKL8U0xpxtxBDlolIA6aBgJJ9Xldy2hMKa0ko3JB0sKA1XJIuG5Lmbc6hx/jT5ff9oaWQL50jzZsqoh4Uq3dTUtBiAF9AmxtaJAVYHM6MBmLE1Zny8EABNOaFJ9nW9sfQryfr4fN7oaJxrNOPEv8sv1ZyvSFwPxGuSLjbJNi85GzcmGCvgdQvAUQk8YUbE8nK6a7xhX7uKD7JWo8XpoEVhDEeIk7em+S6u5AxPlIiJq6PQEgWMraaJjC6Zh+Vb9Uu2bUiFw12GOGIB5pqhrXTlto9SczSomk5Dyw9IJsL1dku1C+9SKpYHR5Fvmj1VhE1D2ukbTkX3WlQsuGmErbqw4KLnE5oHBDlWWbt10K22i+xQVgiANrVhaT4g271g22xfKI3kTDQKi33d5rY7fB4Mmgxn5B3NtgNy/5D7EKOdieHcfyhcRmiGo0mZBauwW+XBe+KlzOblSoxSz7pjunvj6A8RgcpaY9Mw3tfZ1BA6n2f41IOt6puaRAucrz/AiSbUNaR/Fjxj+geAxk668PJqRLiPexX8QPuS/OjVmo84yjhleqV2CXac9o18Vnb06uEm3e01PvWW8XZfh4iZFdn+n9mQTLWSCQhcjanRntB5ElF6yl9cQl++zGpfbo7unp9VZgE9M2dJoFFdbRmc5cRarRMLLd0P3S5KnAEoGWuUaHwcTHPXhL/U2q/NjPdF+k6tIHV6J8AqeF9PBtzyZxu2HLVvaQPdlqHhShswaG0zmLQdVWsRbb+lPV5avf44Qdpm2Vo/67JLnfb+oo86RDeNKxLdHkr0208TXcXGz/pW0S066C+61SG6/S36x0TXC7VTRP9SH43VLahyzHZpc/xHY7DfUG85xWP1A2MxvPoRFz78Bw== \ No newline at end of file diff --git a/package.json b/package.json index 35ffcf5df8..91167499bd 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "libp2p-keychain", - "version": "0.0.0", - "description": "", + "version": "0.1.0", + "description": "Key management and cryptographically protected messages", "main": "src/index.js", "scripts": { "lint": "aegir lint", @@ -29,16 +29,37 @@ "IPFS", "libp2p", "keys", + "encryption", + "secure", "crypto" ], - "author": "David Dias ", + "author": "Richard Schneider ", "license": "MIT", "bugs": { "url": "https://github.com/libp2p/js-libp2p-keychain/issues" }, "homepage": "https://github.com/libp2p/js-libp2p-keychain#readme", + "dependencies": { + "async": "^2.6.0", + "deepmerge": "^1.5.2", + "interface-datastore": "~0.4.1", + "libp2p-crypto": "~0.10.3", + "multihashes": "~0.4.12", + "node-forge": "~0.7.1", + "pull-stream": "^3.6.1", + "sanitize-filename": "^1.6.1" + }, "devDependencies": { "aegir": "^12.2.0", - "pre-commit": "^1.2.2" + "chai": "^4.1.2", + "chai-string": "^1.4.0", + "datastore-fs": "^0.4.1", + "datastore-level": "^0.7.0", + "dirty-chai": "^2.0.1", + "level-js": "^2.2.4", + "mocha": "^4.0.1", + "peer-id": "^0.10.2", + "pre-commit": "^1.2.2", + "rimraf": "^2.6.2" } } diff --git a/src/cms.js b/src/cms.js new file mode 100644 index 0000000000..2f2d9c7dc4 --- /dev/null +++ b/src/cms.js @@ -0,0 +1,97 @@ +'use strict' + +const async = require('async') +const forge = require('node-forge') +const util = require('./util') + +class CMS { + constructor (keystore) { + if (!keystore) { + throw new Error('keystore is required') + } + + this.keystore = keystore; + } + + createAnonymousEncryptedData (name, plain, callback) { + const self = this + if (!Buffer.isBuffer(plain)) { + return 
callback(new Error('Data is required')) + } + + self.keystore._getPrivateKey(name, (err, key) => { + if (err) { + return callback(err) + } + + try { + const privateKey = forge.pki.decryptRsaPrivateKey(key, self.keystore._()) + util.certificateForKey(privateKey, (err, certificate) => { + if (err) return callback(err) + + // create a p7 enveloped message + const p7 = forge.pkcs7.createEnvelopedData() + p7.addRecipient(certificate) + p7.content = forge.util.createBuffer(plain) + p7.encrypt() + + // convert message to DER + const der = forge.asn1.toDer(p7.toAsn1()).getBytes() + callback(null, Buffer.from(der, 'binary')) + }) + } catch (err) { + callback(err) + } + }) + } + + readData (cmsData, callback) { + if (!Buffer.isBuffer(cmsData)) { + return callback(new Error('CMS data is required')) + } + + const self = this + let cms + try { + const buf = forge.util.createBuffer(cmsData.toString('binary')); + const obj = forge.asn1.fromDer(buf) + cms = forge.pkcs7.messageFromAsn1(obj) + } catch (err) { + return callback(new Error('Invalid CMS: ' + err.message)) + } + + // Find a recipient whose key we hold. We only deal with recipient certs + // issued by ipfs (O=ipfs). + const recipients = cms.recipients + .filter(r => r.issuer.find(a => a.shortName === 'O' && a.value === 'ipfs')) + .filter(r => r.issuer.find(a => a.shortName === 'CN')) + .map(r => { + return { + recipient: r, + keyId: r.issuer.find(a => a.shortName === 'CN').value + } + }) + async.detect( + recipients, + (r, cb) => self.keystore.findKeyById(r.keyId, (err, info) => cb(null, !err && info)), + (err, r) => { + if (err) return callback(err) + if (!r) return callback(new Error('No key found for decryption')) + + async.waterfall([ + (cb) => self.keystore.findKeyById(r.keyId, cb), + (key, cb) => self.keystore._getPrivateKey(key.name, cb) + ], (err, pem) => { + if (err) return callback(err); + + const privateKey = forge.pki.decryptRsaPrivateKey(pem, self.keystore._()) + cms.decrypt(r.recipient, privateKey) + async.setImmediate(() => callback(null, Buffer.from(cms.content.getBytes(), 'binary'))) + }) + } + ) + } + +} + +module.exports = CMS diff --git a/src/index.js b/src/index.js index ccacec309b..2704d6268a 100644 --- a/src/index.js +++ b/src/index.js @@ -1 +1,3 @@ 'use strict' + +module.exports = require('./keychain') diff --git a/src/keychain.js b/src/keychain.js new file mode 100644 index 0000000000..50a6798310 --- /dev/null +++ b/src/keychain.js @@ -0,0 +1,362 @@ +'use strict' + +const async = require('async') +const sanitize = require("sanitize-filename") +const forge = require('node-forge') +const deepmerge = require('deepmerge') +const crypto = require('crypto') +const libp2pCrypto = require('libp2p-crypto') +const util = require('./util') +const CMS = require('./cms') +const DS = require('interface-datastore') +const pull = require('pull-stream') + +const keyExtension = '.p8' + +// NIST SP 800-132 +const NIST = { + minKeyLength: 112 / 8, + minSaltLength: 128 / 8, + minIterationCount: 1000 +} + +const defaultOptions = { + // See https://cryptosense.com/parametesr-choice-for-pbkdf2/ + dek: { + keyLength: 512 / 8, + iterationCount: 10000, + salt: 'you should override this value with a crypto secure random number', + hash: 'sha512' + } +} + +function validateKeyName (name) { + if (!name) return false + + return name === sanitize(name.trim()) +} + +/** + * Returns an error to the caller, after a delay + * + * This assumes than an error indicates that the keychain is under attack. 
Delay returning an + * error to make brute force attacks harder. + * + * @param {function(Error)} callback - The caller + * @param {string | Error} err - The error + */ +function _error(callback, err) { + const min = 200 + const max = 1000 + const delay = Math.random() * (max - min) + min + if (typeof err === 'string') err = new Error(err) + setTimeout(callback, delay, err, null) +} + +/** + * Converts a key name into a datastore name. + */ +function DsName (name) { + return new DS.Key('/' + name) +} + +/** + * Converts a datastore name into a key name. + */ +function KsName(name) { + return name.toString().slice(1) +} + +class Keychain { + constructor (store, options) { + if (!store) { + throw new Error('store is required') + } + this.store = store + if (this.store.opts) { + this.store.opts.extension = keyExtension + } + + const opts = deepmerge(defaultOptions, options) + + // Enforce NIST SP 800-132 + if (!opts.passPhrase || opts.passPhrase.length < 20) { + throw new Error('passPhrase must be least 20 characters') + } + if (opts.dek.keyLength < NIST.minKeyLength) { + throw new Error(`dek.keyLength must be least ${NIST.minKeyLength} bytes`) + } + if (opts.dek.salt.length < NIST.minSaltLength) { + throw new Error(`dek.saltLength must be least ${NIST.minSaltLength} bytes`) + } + if (opts.dek.iterationCount < NIST.minIterationCount) { + throw new Error(`dek.iterationCount must be least ${NIST.minIterationCount}`) + } + this.dek = opts.dek + + // Create the derived encrypting key + let dek = forge.pkcs5.pbkdf2( + opts.passPhrase, + opts.dek.salt, + opts.dek.iterationCount, + opts.dek.keyLength, + opts.dek.hash) + dek = forge.util.bytesToHex(dek) + Object.defineProperty(this, '_', { value: () => dek }) + + // JS magick + this._getKeyInfo = this.findKeyByName = this._getKeyInfo.bind(this) + + // Provide access to protected messages + this.cms = new CMS(this) + } + + static get options() { + return defaultOptions + } + + createKey (name, type, size, callback) { + const self = this + + if (!validateKeyName(name) || name === 'self') { + return _error(callback, `Invalid key name '${name}'`) + } + const dsname = DsName(name) + self.store.has(dsname, (err, exists) => { + if (exists) return _error(callback, `Key '${name}' already exists'`) + + switch (type.toLowerCase()) { + case 'rsa': + if (size < 2048) { + return _error(callback, `Invalid RSA key size ${size}`) + } + forge.pki.rsa.generateKeyPair({bits: size, workers: -1}, (err, keypair) => { + if (err) return _error(callback, err) + + const pem = forge.pki.encryptRsaPrivateKey(keypair.privateKey, this._()); + return self.store.put(dsname, pem, (err) => { + if (err) return _error(callback, err) + + self._getKeyInfo(name, callback) + }) + }) + break; + + default: + return _error(callback, `Invalid key type '${type}'`) + } + }) + } + + listKeys (callback) { + const self = this + const query = { + keysOnly: true + } + pull( + self.store.query(query), + pull.collect((err, res) => { + if (err) return _error(callback, err) + + const names = res.map(r => KsName(r.key)) + async.map(names, self._getKeyInfo, callback) + }) + ) + } + + // TODO: not very efficent. 
+ findKeyById (id, callback) { + this.listKeys((err, keys) => { + if (err) return _error(callback, err) + + const key = keys.find((k) => k.id === id) + callback(null, key) + }) + } + + removeKey (name, callback) { + const self = this + if (!validateKeyName(name) || name === 'self') { + return _error(callback, `Invalid key name '${name}'`) + } + const dsname = DsName(name) + self.store.has(dsname, (err, exists) => { + if (!exists) return _error(callback, `Key '${name}' does not exist'`) + + self.store.delete(dsname, callback) + }) + } + + renameKey(oldName, newName, callback) { + const self = this + if (!validateKeyName(oldName) || oldName === 'self') { + return _error(callback, `Invalid old key name '${oldName}'`) + } + if (!validateKeyName(newName) || newName === 'self') { + return _error(callback, `Invalid new key name '${newName}'`) + } + const oldDsname = DsName(oldName) + const newDsname = DsName(newName) + this.store.get(oldDsname, (err, res) => { + if (err) { + return _error(callback, `Key '${oldName}' does not exist. ${err.message}`) + } + const pem = res.toString() + self.store.has(newDsname, (err, exists) => { + if (exists) return _error(callback, `Key '${newName}' already exists'`) + + const batch = self.store.batch() + batch.put(newDsname, pem) + batch.delete(oldDsname) + batch.commit((err) => { + if (err) return _error(callback, err) + self._getKeyInfo(newName, callback) + }) + }) + }) + } + + exportKey (name, password, callback) { + if (!validateKeyName(name)) { + return _error(callback, `Invalid key name '${name}'`) + } + if (!password) { + return _error(callback, 'Password is required') + } + + const dsname = DsName(name) + this.store.get(dsname, (err, res) => { + if (err) { + return _error(callback, `Key '${name}' does not exist. ${err.message}`) + } + const pem = res.toString() + try { + const options = { + algorithm: 'aes256', + count: this.dek.iterationCount, + saltSize: NIST.minSaltLength, + prfAlgorithm: 'sha512' + } + const privateKey = forge.pki.decryptRsaPrivateKey(pem, this._()) + const res = forge.pki.encryptRsaPrivateKey(privateKey, password, options) + return callback(null, res) + } catch (e) { + _error(callback, e) + } + }) + } + + importKey(name, pem, password, callback) { + const self = this + if (!validateKeyName(name) || name === 'self') { + return _error(callback, `Invalid key name '${name}'`) + } + if (!pem) { + return _error(callback, 'PEM encoded key is required') + } + const dsname = DsName(name) + self.store.has(dsname, (err, exists) => { + if (exists) return _error(callback, `Key '${name}' already exists'`) + try { + const privateKey = forge.pki.decryptRsaPrivateKey(pem, password) + if (privateKey === null) { + return _error(callback, 'Cannot read the key, most likely the password is wrong') + } + const newpem = forge.pki.encryptRsaPrivateKey(privateKey, this._()); + return self.store.put(dsname, newpem, (err) => { + if (err) return _error(callback, err) + + this._getKeyInfo(name, callback) + }) + } catch (err) { + _error(callback, err) + } + }) + } + + importPeer (name, peer, callback) { + const self = this + if (!validateKeyName(name)) { + return _error(callback, `Invalid key name '${name}'`) + } + if (!peer || !peer.privKey) { + return _error(callback, 'Peer.privKey \is required') + } + const dsname = DsName(name) + self.store.has(dsname, (err, exists) => { + if (exists) return _error(callback, `Key '${name}' already exists'`) + + const privateKeyProtobuf = peer.marshalPrivKey() + libp2pCrypto.keys.unmarshalPrivateKey(privateKeyProtobuf, (err, 
key) => { + try { + const der = key.marshal() + const buf = forge.util.createBuffer(der.toString('binary')); + const obj = forge.asn1.fromDer(buf) + const privateKey = forge.pki.privateKeyFromAsn1(obj) + if (privateKey === null) { + return _error(callback, 'Cannot read the peer private key') + } + const pem = forge.pki.encryptRsaPrivateKey(privateKey, this._()); + return self.store.put(dsname, pem, (err) => { + if (err) return _error(callback, err) + + this._getKeyInfo(name, callback) + }) + } catch (err) { + _error(callback, err) + } + }) + }) + } + + /** + * Gets the private key as PEM encoded PKCS #8 + * + * @param {string} name + * @param {function(Error, string)} callback + */ + _getPrivateKey (name, callback) { + const self = this + if (!validateKeyName(name)) { + return _error(callback, `Invalid key name '${name}'`) + } + this.store.get(DsName(name), (err, res) => { + if (err) { + return _error(callback, `Key '${name}' does not exist. ${err.message}`) + } + callback(null, res.toString()) + }) + } + + _getKeyInfo (name, callback) { + const self = this + if (!validateKeyName(name)) { + return _error(callback, `Invalid key name '${name}'`) + } + + const dsname = DsName(name) + this.store.get(dsname, (err, res) => { + if (err) { + return _error(callback, `Key '${name}' does not exist. ${err.message}`) + } + const pem = res.toString() + try { + const privateKey = forge.pki.decryptRsaPrivateKey(pem, this._()) + util.keyId(privateKey, (err, kid) => { + if (err) return _error(callback, err) + + const info = { + name: name, + id: kid + } + return callback(null, info) + }) + } catch (e) { + _error(callback, e) + } + }) + } + +} + +module.exports = Keychain diff --git a/src/util.js b/src/util.js new file mode 100644 index 0000000000..c3cd5a1ff3 --- /dev/null +++ b/src/util.js @@ -0,0 +1,86 @@ +'use strict' + +const forge = require('node-forge') +const pki = forge.pki +const multihash = require('multihashes') +const rsaUtils = require('libp2p-crypto/src/keys/rsa-utils') +const rsaClass = require('libp2p-crypto/src/keys/rsa-class') + +exports = module.exports + +// Create an IPFS key id; the SHA-256 multihash of a public key. 
+// See https://github.com/richardschneider/ipfs-encryption/issues/16 +exports.keyId = (privateKey, callback) => { + try { + const publicKey = pki.setRsaPublicKey(privateKey.n, privateKey.e) + const spki = pki.publicKeyToSubjectPublicKeyInfo(publicKey) + const der = new Buffer(forge.asn1.toDer(spki).getBytes(), 'binary') + const jwk = rsaUtils.pkixToJwk(der) + const rsa = new rsaClass.RsaPublicKey(jwk) + rsa.hash((err, kid) => { + if (err) return callback(err) + + const kids = multihash.toB58String(kid) + return callback(null, kids) + }) + } catch (err) { + callback(err) + } +} + +exports.certificateForKey = (privateKey, callback) => { + exports.keyId(privateKey, (err, kid) => { + if (err) return callback(err) + + const publicKey = pki.setRsaPublicKey(privateKey.n, privateKey.e) + const cert = pki.createCertificate(); + cert.publicKey = publicKey; + cert.serialNumber = '01'; + cert.validity.notBefore = new Date(); + cert.validity.notAfter = new Date(); + cert.validity.notAfter.setFullYear(cert.validity.notBefore.getFullYear() + 10); + var attrs = [{ + name: 'organizationName', + value: 'ipfs' + }, { + shortName: 'OU', + value: 'keystore' + }, { + name: 'commonName', + value: kid + }]; + cert.setSubject(attrs); + cert.setIssuer(attrs); + cert.setExtensions([{ + name: 'basicConstraints', + cA: true + }, { + name: 'keyUsage', + keyCertSign: true, + digitalSignature: true, + nonRepudiation: true, + keyEncipherment: true, + dataEncipherment: true + }, { + name: 'extKeyUsage', + serverAuth: true, + clientAuth: true, + codeSigning: true, + emailProtection: true, + timeStamping: true + }, { + name: 'nsCertType', + client: true, + server: true, + email: true, + objsign: true, + sslCA: true, + emailCA: true, + objCA: true + }]); + // self-sign certificate + cert.sign(privateKey) + + return callback(null, cert) + }) +} diff --git a/test/browser.js b/test/browser.js new file mode 100644 index 0000000000..a2633bef73 --- /dev/null +++ b/test/browser.js @@ -0,0 +1,30 @@ +/* eslint-env mocha */ +'use strict' + +const async = require('async') +const LevelStore = require('datastore-level') + +// use in the browser with level.js +const browserStore = new LevelStore('my/db/name', {db: require('level-js')}) + +describe('browser', () => { + const datastore1 = new LevelStore('test-keystore-1', {db: require('level-js')}) + const datastore2 = new LevelStore('test-keystore-2', {db: require('level-js')}) + + before((done) => { + async.series([ + (cb) => datastore1.open(cb), + (cb) => datastore2.open(cb) + ], done) + }) + + after((done) => { + async.series([ + (cb) => datastore1.close(cb), + (cb) => datastore2.close(cb) + ], done) + }) + + require('./keychain.spec')(datastore1, datastore2) + require('./peerid') +}) diff --git a/test/index.spec.js b/test/index.spec.js deleted file mode 100644 index c638cf8684..0000000000 --- a/test/index.spec.js +++ /dev/null @@ -1,4 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -it('so much testing', () => {}) diff --git a/test/keychain.spec.js b/test/keychain.spec.js new file mode 100644 index 0000000000..cc1048cfe1 --- /dev/null +++ b/test/keychain.spec.js @@ -0,0 +1,356 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +chai.use(require('chai-string')) +const Keychain = require('..') +const PeerId = require('peer-id') + +module.exports = (datastore1, datastore2) => { + describe('keychain', () => { + const passPhrase = 'this is not a secure phrase' + const rsaKeyName = 
'tajné jméno' + const renamedRsaKeyName = 'ชื่อลับ' + let rsaKeyInfo + let emptyKeystore + let ks + + before((done) => { + emptyKeystore = new Keychain(datastore1, { passPhrase: passPhrase }) + ks = new Keychain(datastore2, { passPhrase: passPhrase }) + done() + }) + + it('needs a pass phrase to encrypt a key', () => { + expect(() => new Keychain(datastore2)).to.throw() + }) + + it ('needs a NIST SP 800-132 non-weak pass phrase', () => { + expect(() => new Keychain(datastore2, { passPhrase: '< 20 character'})).to.throw() + }) + + it('needs a store to persist a key', () => { + expect(() => new Keychain(null, { passPhrase: passPhrase})).to.throw() + }) + + it('has default options', () => { + expect(Keychain.options).to.exist() + }) + + describe('key name', () => { + it('is a valid filename and non-ASCII', () => { + ks.removeKey('../../nasty', (err) => { + expect(err).to.exist() + expect(err).to.have.property('message', 'Invalid key name \'../../nasty\'') + }) + ks.removeKey('', (err) => { + expect(err).to.exist() + expect(err).to.have.property('message', 'Invalid key name \'\'') + }) + ks.removeKey(' ', (err) => { + expect(err).to.exist() + expect(err).to.have.property('message', 'Invalid key name \' \'') + }) + ks.removeKey(null, (err) => { + expect(err).to.exist() + expect(err).to.have.property('message', 'Invalid key name \'null\'') + }) + ks.removeKey(undefined, (err) => { + expect(err).to.exist() + expect(err).to.have.property('message', 'Invalid key name \'undefined\'') + }) + }) + }) + + describe('key', () => { + it('can be an RSA key', function (done) { + this.timeout(20 * 1000) + ks.createKey(rsaKeyName, 'rsa', 2048, (err, info) => { + expect(err).to.not.exist() + expect(info).exist() + rsaKeyInfo = info + done() + }) + }) + + it('has a name and id', () => { + expect(rsaKeyInfo).to.have.property('name', rsaKeyName) + expect(rsaKeyInfo).to.have.property('id') + }) + + it('is encrypted PEM encoded PKCS #8', (done) => { + ks._getPrivateKey(rsaKeyName, (err, pem) => { + expect(err).to.not.exist() + expect(pem).to.startsWith('-----BEGIN ENCRYPTED PRIVATE KEY-----') + done() + }) + }) + + it('does not overwrite existing key', (done) => { + ks.createKey(rsaKeyName, 'rsa', 2048, (err) => { + expect(err).to.exist() + done() + }) + }) + + it('cannot create the "self" key', (done) => { + ks.createKey('self', 'rsa', 2048, (err) => { + expect(err).to.exist() + done() + }) + }) + + describe('implements NIST SP 800-131A', () => { + it('disallows RSA length < 2048', (done) => { + ks.createKey('bad-nist-rsa', 'rsa', 1024, (err) => { + expect(err).to.exist() + expect(err).to.have.property('message', 'Invalid RSA key size 1024') + done() + }) + }) + }) + + }) + + describe('query', () => { + it('finds all existing keys', (done) => { + ks.listKeys((err, keys) => { + expect(err).to.not.exist() + expect(keys).to.exist() + const mykey = keys.find((k) => k.name === rsaKeyName) + expect(mykey).to.exist() + done() + }) + }) + + it('finds a key by name', (done) => { + ks.findKeyByName(rsaKeyName, (err, key) => { + expect(err).to.not.exist() + expect(key).to.exist() + expect(key).to.deep.equal(rsaKeyInfo) + done() + }) + }) + + it('finds a key by id', (done) => { + ks.findKeyById(rsaKeyInfo.id, (err, key) => { + expect(err).to.not.exist() + expect(key).to.exist() + expect(key).to.deep.equal(rsaKeyInfo) + done() + }) + }) + + it('returns the key\'s name and id', (done) => { + ks.listKeys((err, keys) => { + expect(err).to.not.exist() + expect(keys).to.exist() + keys.forEach((key) => { + 
expect(key).to.have.property('name') + expect(key).to.have.property('id') + }) + done() + }) + }) + }) + + describe('CMS protected data', () => { + const plainData = Buffer.from('This is a message from Alice to Bob') + let cms + + it('service is available', (done) => { + expect(ks).to.have.property('cms') + done() + }) + + it('is anonymous', (done) => { + ks.cms.createAnonymousEncryptedData(rsaKeyName, plainData, (err, msg) => { + expect(err).to.not.exist() + expect(msg).to.exist() + expect(msg).to.be.instanceOf(Buffer) + cms = msg + done() + }) + }) + + it('is a PKCS #7 message', (done) => { + ks.cms.readData("not CMS", (err) => { + expect(err).to.exist() + done() + }) + }) + + it('is a PKCS #7 binary message', (done) => { + ks.cms.readData(plainData, (err) => { + expect(err).to.exist() + done() + }) + }) + + it('cannot be read without the key', (done) => { + emptyKeystore.cms.readData(cms, (err, plain) => { + expect(err).to.exist() + done() + }) + }) + + it('can be read with the key', (done) => { + ks.cms.readData(cms, (err, plain) => { + expect(err).to.not.exist() + expect(plain).to.exist() + expect(plain.toString()).to.equal(plainData.toString()) + done() + }) + }) + + }) + + describe('exported key', () => { + let pemKey + + it('is a PKCS #8 encrypted pem', (done) => { + ks.exportKey(rsaKeyName, 'password', (err, pem) => { + expect(err).to.not.exist() + expect(pem).to.startsWith('-----BEGIN ENCRYPTED PRIVATE KEY-----') + pemKey = pem + done() + }) + }) + + it('can be imported', (done) => { + ks.importKey('imported-key', pemKey, 'password', (err, key) => { + expect(err).to.not.exist() + expect(key.name).to.equal('imported-key') + expect(key.id).to.equal(rsaKeyInfo.id) + done() + }) + }) + + it('cannot be imported as an existing key name', (done) => { + ks.importKey(rsaKeyName, pemKey, 'password', (err, key) => { + expect(err).to.exist() + done() + }) + }) + + it('cannot be imported with the wrong password', function (done) { + this.timeout(5 * 1000) + ks.importKey('a-new-name-for-import', pemKey, 'not the password', (err, key) => { + expect(err).to.exist() + done() + }) + }) + }) + + describe('peer id', () => { + const alicePrivKey = 
'CAASpgkwggSiAgEAAoIBAQC2SKo/HMFZeBml1AF3XijzrxrfQXdJzjePBZAbdxqKR1Mc6juRHXij6HXYPjlAk01BhF1S3Ll4Lwi0cAHhggf457sMg55UWyeGKeUv0ucgvCpBwlR5cQ020i0MgzjPWOLWq1rtvSbNcAi2ZEVn6+Q2EcHo3wUvWRtLeKz+DZSZfw2PEDC+DGPJPl7f8g7zl56YymmmzH9liZLNrzg/qidokUv5u1pdGrcpLuPNeTODk0cqKB+OUbuKj9GShYECCEjaybJDl9276oalL9ghBtSeEv20kugatTvYy590wFlJkkvyl+nPxIH0EEYMKK9XRWlu9XYnoSfboiwcv8M3SlsjAgMBAAECggEAZtju/bcKvKFPz0mkHiaJcpycy9STKphorpCT83srBVQi59CdFU6Mj+aL/xt0kCPMVigJw8P3/YCEJ9J+rS8BsoWE+xWUEsJvtXoT7vzPHaAtM3ci1HZd302Mz1+GgS8Epdx+7F5p80XAFLDUnELzOzKftvWGZmWfSeDnslwVONkL/1VAzwKy7Ce6hk4SxRE7l2NE2OklSHOzCGU1f78ZzVYKSnS5Ag9YrGjOAmTOXDbKNKN/qIorAQ1bovzGoCwx3iGIatQKFOxyVCyO1PsJYT7JO+kZbhBWRRE+L7l+ppPER9bdLFxs1t5CrKc078h+wuUr05S1P1JjXk68pk3+kQKBgQDeK8AR11373Mzib6uzpjGzgNRMzdYNuExWjxyxAzz53NAR7zrPHvXvfIqjDScLJ4NcRO2TddhXAfZoOPVH5k4PJHKLBPKuXZpWlookCAyENY7+Pd55S8r+a+MusrMagYNljb5WbVTgN8cgdpim9lbbIFlpN6SZaVjLQL3J8TWH6wKBgQDSChzItkqWX11CNstJ9zJyUE20I7LrpyBJNgG1gtvz3ZMUQCn3PxxHtQzN9n1P0mSSYs+jBKPuoSyYLt1wwe10/lpgL4rkKWU3/m1Myt0tveJ9WcqHh6tzcAbb/fXpUFT/o4SWDimWkPkuCb+8j//2yiXk0a/T2f36zKMuZvujqQKBgC6B7BAQDG2H2B/ijofp12ejJU36nL98gAZyqOfpLJ+FeMz4TlBDQ+phIMhnHXA5UkdDapQ+zA3SrFk+6yGk9Vw4Hf46B+82SvOrSbmnMa+PYqKYIvUzR4gg34rL/7AhwnbEyD5hXq4dHwMNsIDq+l2elPjwm/U9V0gdAl2+r50HAoGALtsKqMvhv8HucAMBPrLikhXP/8um8mMKFMrzfqZ+otxfHzlhI0L08Bo3jQrb0Z7ByNY6M8epOmbCKADsbWcVre/AAY0ZkuSZK/CaOXNX/AhMKmKJh8qAOPRY02LIJRBCpfS4czEdnfUhYV/TYiFNnKRj57PPYZdTzUsxa/yVTmECgYBr7slQEjb5Onn5mZnGDh+72BxLNdgwBkhO0OCdpdISqk0F0Pxby22DFOKXZEpiyI9XYP1C8wPiJsShGm2yEwBPWXnrrZNWczaVuCbXHrZkWQogBDG3HGXNdU4MAWCyiYlyinIBpPpoAJZSzpGLmWbMWh28+RJS6AQX6KHrK1o2uw==' + let alice + + before(function (done) { + const encoded = Buffer.from(alicePrivKey, 'base64') + PeerId.createFromPrivKey(encoded, (err, id) => { + alice = id + done() + }) + }) + + it('private key can be imported', (done) => { + ks.importPeer('alice', alice, (err, key) => { + expect(err).to.not.exist() + expect(key.name).to.equal('alice') + expect(key.id).to.equal(alice.toB58String()) + done() + }) + }) + }) + + describe('rename', () => { + it('requires an existing key name', (done) => { + ks.renameKey('not-there', renamedRsaKeyName, (err) => { + expect(err).to.exist() + done() + }) + }) + + it('requires a valid new key name', (done) => { + ks.renameKey(rsaKeyName, '..\not-valid', (err) => { + expect(err).to.exist() + done() + }) + }) + + it('does not overwrite existing key', (done) => { + ks.renameKey(rsaKeyName, rsaKeyName, (err) => { + expect(err).to.exist() + done() + }) + }) + + it('cannot create the "self" key', (done) => { + ks.renameKey(rsaKeyName, 'self', (err) => { + expect(err).to.exist() + done() + }) + }) + + it('removes the existing key name', (done) => { + ks.renameKey(rsaKeyName, renamedRsaKeyName, (err, key) => { + expect(err).to.not.exist() + expect(key).to.exist() + expect(key).to.have.property('name', renamedRsaKeyName) + expect(key).to.have.property('id', rsaKeyInfo.id) + ks.findKeyByName(rsaKeyName, (err, key) => { + expect(err).to.exist() + done() + }) + }) + }) + + it('creates the new key name', (done) => { + ks.findKeyByName(renamedRsaKeyName, (err, key) => { + expect(err).to.not.exist() + expect(key).to.exist() + expect(key).to.have.property('name', renamedRsaKeyName) + done() + }) + }) + + it('does not change the key ID', (done) => { + ks.findKeyByName(renamedRsaKeyName, (err, key) => { + expect(err).to.not.exist() + expect(key).to.exist() + expect(key).to.have.property('name', renamedRsaKeyName) + expect(key).to.have.property('id', rsaKeyInfo.id) + done() + }) + }) + }) + + describe('key 
removal', () => { + it('cannot remove the "self" key', (done) => { + ks.removeKey('self', (err) => { + expect(err).to.exist() + done() + }) + }) + + it('cannot remove an unknown key', (done) => { + ks.removeKey('not-there', (err) => { + expect(err).to.exist() + done() + }) + }) + + it('can remove a known key', (done) => { + ks.removeKey(renamedRsaKeyName, (err) => { + expect(err).to.not.exist() + done() + }) + }) + }) + + }) +} diff --git a/test/node.js b/test/node.js new file mode 100644 index 0000000000..b003a7c8ba --- /dev/null +++ b/test/node.js @@ -0,0 +1,34 @@ +/* eslint-env mocha */ +'use strict' + +const os = require('os') +const path = require('path') +const rimraf = require('rimraf') +const async = require('async') +const FsStore = require('datastore-fs') + +describe('node', () => { + const store1 = path.join(os.tmpdir(), 'test-keystore-1') + const store2 = path.join(os.tmpdir(), 'test-keystore-2') + const datastore1 = new FsStore(store1) + const datastore2 = new FsStore(store2) + + before((done) => { + async.series([ + (cb) => datastore1.open(cb), + (cb) => datastore2.open(cb) + ], done) + }) + + after((done) => { + async.series([ + (cb) => datastore1.close(cb), + (cb) => datastore2.close(cb), + (cb) => rimraf(store1, cb), + (cb) => rimraf(store2, cb) + ], done) + }) + + require('./keychain.spec')(datastore1, datastore2) + require('./peerid') +}) diff --git a/test/peerid.js b/test/peerid.js new file mode 100644 index 0000000000..8d3063c437 --- /dev/null +++ b/test/peerid.js @@ -0,0 +1,105 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const PeerId = require('peer-id') +const multihash = require('multihashes') +const crypto = require('libp2p-crypto') +const rsaUtils = require('libp2p-crypto/src/keys/rsa-utils') +const rsaClass = require('libp2p-crypto/src/keys/rsa-class') + +const sample = { + id: '122019318b6e5e0cf93a2314bf01269a2cc23cd3dcd452d742cdb9379d8646f6e4a9', + privKey: 
'CAASpgkwggSiAgEAAoIBAQC2SKo/HMFZeBml1AF3XijzrxrfQXdJzjePBZAbdxqKR1Mc6juRHXij6HXYPjlAk01BhF1S3Ll4Lwi0cAHhggf457sMg55UWyeGKeUv0ucgvCpBwlR5cQ020i0MgzjPWOLWq1rtvSbNcAi2ZEVn6+Q2EcHo3wUvWRtLeKz+DZSZfw2PEDC+DGPJPl7f8g7zl56YymmmzH9liZLNrzg/qidokUv5u1pdGrcpLuPNeTODk0cqKB+OUbuKj9GShYECCEjaybJDl9276oalL9ghBtSeEv20kugatTvYy590wFlJkkvyl+nPxIH0EEYMKK9XRWlu9XYnoSfboiwcv8M3SlsjAgMBAAECggEAZtju/bcKvKFPz0mkHiaJcpycy9STKphorpCT83srBVQi59CdFU6Mj+aL/xt0kCPMVigJw8P3/YCEJ9J+rS8BsoWE+xWUEsJvtXoT7vzPHaAtM3ci1HZd302Mz1+GgS8Epdx+7F5p80XAFLDUnELzOzKftvWGZmWfSeDnslwVONkL/1VAzwKy7Ce6hk4SxRE7l2NE2OklSHOzCGU1f78ZzVYKSnS5Ag9YrGjOAmTOXDbKNKN/qIorAQ1bovzGoCwx3iGIatQKFOxyVCyO1PsJYT7JO+kZbhBWRRE+L7l+ppPER9bdLFxs1t5CrKc078h+wuUr05S1P1JjXk68pk3+kQKBgQDeK8AR11373Mzib6uzpjGzgNRMzdYNuExWjxyxAzz53NAR7zrPHvXvfIqjDScLJ4NcRO2TddhXAfZoOPVH5k4PJHKLBPKuXZpWlookCAyENY7+Pd55S8r+a+MusrMagYNljb5WbVTgN8cgdpim9lbbIFlpN6SZaVjLQL3J8TWH6wKBgQDSChzItkqWX11CNstJ9zJyUE20I7LrpyBJNgG1gtvz3ZMUQCn3PxxHtQzN9n1P0mSSYs+jBKPuoSyYLt1wwe10/lpgL4rkKWU3/m1Myt0tveJ9WcqHh6tzcAbb/fXpUFT/o4SWDimWkPkuCb+8j//2yiXk0a/T2f36zKMuZvujqQKBgC6B7BAQDG2H2B/ijofp12ejJU36nL98gAZyqOfpLJ+FeMz4TlBDQ+phIMhnHXA5UkdDapQ+zA3SrFk+6yGk9Vw4Hf46B+82SvOrSbmnMa+PYqKYIvUzR4gg34rL/7AhwnbEyD5hXq4dHwMNsIDq+l2elPjwm/U9V0gdAl2+r50HAoGALtsKqMvhv8HucAMBPrLikhXP/8um8mMKFMrzfqZ+otxfHzlhI0L08Bo3jQrb0Z7ByNY6M8epOmbCKADsbWcVre/AAY0ZkuSZK/CaOXNX/AhMKmKJh8qAOPRY02LIJRBCpfS4czEdnfUhYV/TYiFNnKRj57PPYZdTzUsxa/yVTmECgYBr7slQEjb5Onn5mZnGDh+72BxLNdgwBkhO0OCdpdISqk0F0Pxby22DFOKXZEpiyI9XYP1C8wPiJsShGm2yEwBPWXnrrZNWczaVuCbXHrZkWQogBDG3HGXNdU4MAWCyiYlyinIBpPpoAJZSzpGLmWbMWh28+RJS6AQX6KHrK1o2uw==', + pubKey: 'CAASpgIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC2SKo/HMFZeBml1AF3XijzrxrfQXdJzjePBZAbdxqKR1Mc6juRHXij6HXYPjlAk01BhF1S3Ll4Lwi0cAHhggf457sMg55UWyeGKeUv0ucgvCpBwlR5cQ020i0MgzjPWOLWq1rtvSbNcAi2ZEVn6+Q2EcHo3wUvWRtLeKz+DZSZfw2PEDC+DGPJPl7f8g7zl56YymmmzH9liZLNrzg/qidokUv5u1pdGrcpLuPNeTODk0cqKB+OUbuKj9GShYECCEjaybJDl9276oalL9ghBtSeEv20kugatTvYy590wFlJkkvyl+nPxIH0EEYMKK9XRWlu9XYnoSfboiwcv8M3SlsjAgMBAAE=' +} + +describe('peer ID', () => { + let peer + let publicKeyDer // a buffer + + before(function (done) { + const encoded = Buffer.from(sample.privKey, 'base64') + PeerId.createFromPrivKey(encoded, (err, id) => { + peer = id + done() + }) + }) + + it('decoded public key', (done) => { + // console.log('peer id', peer.toJSON()) + // console.log('id', peer.toB58String()) + // console.log('id decoded', multihash.decode(peer.id)) + + // get protobuf version of the public key + const publicKeyProtobuf = peer.marshalPubKey() + const publicKey = crypto.keys.unmarshalPublicKey(publicKeyProtobuf) + // console.log('public key', publicKey) + publicKeyDer = publicKey.marshal() + // console.log('public key der', publicKeyDer.toString('base64')) + + // get protobuf version of the private key + const privateKeyProtobuf = peer.marshalPrivKey() + crypto.keys.unmarshalPrivateKey(privateKeyProtobuf, (err, key) => { + // console.log('private key', key) + // console.log('\nprivate key der', key.marshal().toString('base64')) + done() + }) + }) + + it('encoded public key with DER', (done) => { + const jwk = rsaUtils.pkixToJwk(publicKeyDer) + // console.log('jwk', jwk) + const rsa = new rsaClass.RsaPublicKey(jwk) + // console.log('rsa', rsa) + rsa.hash((err, keyId) => { + // console.log('err', err) + // console.log('keyId', keyId) + // console.log('id decoded', multihash.decode(keyId)) + const kids = multihash.toB58String(keyId) + // console.log('id', kids) + expect(kids).to.equal(peer.toB58String()) + done() + }) + }) + + it('encoded public 
key with JWT', (done) => { + const jwk = { + kty: 'RSA', + n: 'tkiqPxzBWXgZpdQBd14o868a30F3Sc43jwWQG3caikdTHOo7kR14o-h12D45QJNNQYRdUty5eC8ItHAB4YIH-Oe7DIOeVFsnhinlL9LnILwqQcJUeXENNtItDIM4z1ji1qta7b0mzXAItmRFZ-vkNhHB6N8FL1kbS3is_g2UmX8NjxAwvgxjyT5e3_IO85eemMpppsx_ZYmSza84P6onaJFL-btaXRq3KS7jzXkzg5NHKigfjlG7io_RkoWBAghI2smyQ5fdu-qGpS_YIQbUnhL9tJLoGrU72MufdMBZSZJL8pfpz8SB9BBGDCivV0VpbvV2J6En26IsHL_DN0pbIw', + e: 'AQAB', + alg: 'RS256', + kid: '2011-04-29' + } + // console.log('jwk', jwk) + const rsa = new rsaClass.RsaPublicKey(jwk) + // console.log('rsa', rsa) + rsa.hash((err, keyId) => { + // console.log('err', err) + // console.log('keyId', keyId) + // console.log('id decoded', multihash.decode(keyId)) + const kids = multihash.toB58String(keyId) + // console.log('id', kids) + expect(kids).to.equal(peer.toB58String()) + done() + }) + }) + + it('decoded private key', (done) => { + // console.log('peer id', peer.toJSON()) + // console.log('id', peer.toB58String()) + // console.log('id decoded', multihash.decode(peer.id)) + + // get protobuf version of the private key + const privateKeyProtobuf = peer.marshalPrivKey() + crypto.keys.unmarshalPrivateKey(privateKeyProtobuf, (err, key) => { + // console.log('private key', key) + //console.log('\nprivate key der', key.marshal().toString('base64')) + done() + }) + }) + +}) From 658a4d7907fc2d9cb611359b2dfd0f6952a78075 Mon Sep 17 00:00:00 2001 From: Richard Schneider Date: Wed, 6 Dec 2017 23:13:02 +1300 Subject: [PATCH 004/102] docs: install and links --- README.md | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 0f89dcdb77..90ade224ae 100644 --- a/README.md +++ b/README.md @@ -29,8 +29,13 @@ ## Install + npm install --save libp2p-keychain + ### Usage + const Keychain = require('libp2p-keychain') + const FsStore = require('datastore-fs') + const datastore = new FsStore('./a-keystore') const opts = { passPhrase: 'some long easily remembered phrase' @@ -91,7 +96,7 @@ const defaultOptions = { } ``` -![key storage](../doc/private-key.png?raw=true) +![key storage](./doc/private-key.png?raw=true) ### Physical storage From 409a9990cda06a8ed9434b437a549ae0f7a14056 Mon Sep 17 00:00:00 2001 From: Richard Schneider Date: Thu, 7 Dec 2017 00:10:22 +1300 Subject: [PATCH 005/102] fix: linting --- src/cms.js | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/cms.js b/src/cms.js index 2f2d9c7dc4..fb66c2dffb 100644 --- a/src/cms.js +++ b/src/cms.js @@ -10,7 +10,7 @@ class CMS { throw new Error('keystore is required') } - this.keystore = keystore; + this.keystore = keystore } createAnonymousEncryptedData (name, plain, callback) { @@ -53,7 +53,7 @@ class CMS { const self = this let cms try { - const buf = forge.util.createBuffer(cmsData.toString('binary')); + const buf = forge.util.createBuffer(cmsData.toString('binary')) const obj = forge.asn1.fromDer(buf) cms = forge.pkcs7.messageFromAsn1(obj) } catch (err) { @@ -82,7 +82,7 @@ class CMS { (cb) => self.keystore.findKeyById(r.keyId, cb), (key, cb) => self.keystore._getPrivateKey(key.name, cb) ], (err, pem) => { - if (err) return callback(err); + if (err) return callback(err) const privateKey = forge.pki.decryptRsaPrivateKey(pem, self.keystore._()) cms.decrypt(r.recipient, privateKey) @@ -91,7 +91,6 @@ class CMS { } ) } - } module.exports = CMS From 7c44c91788d3bf1a9bd366dfd79b5201b676da4e Mon Sep 17 00:00:00 2001 From: Richard Schneider Date: Thu, 7 Dec 2017 00:16:38 +1300 Subject: [PATCH 006/102] fix: more linting --- src/keychain.js | 49 
+++++++++++++++++++++++++------------------ src/util.js | 22 +++++++++---------- test/browser.js | 7 ++----- test/keychain.spec.js | 15 +++++++------ test/peerid.js | 8 +++++-- 5 files changed, 55 insertions(+), 46 deletions(-) diff --git a/src/keychain.js b/src/keychain.js index 50a6798310..f4e4b0fcd6 100644 --- a/src/keychain.js +++ b/src/keychain.js @@ -1,11 +1,10 @@ 'use strict' const async = require('async') -const sanitize = require("sanitize-filename") +const sanitize = require('sanitize-filename') const forge = require('node-forge') const deepmerge = require('deepmerge') -const crypto = require('crypto') -const libp2pCrypto = require('libp2p-crypto') +const crypto = require('libp2p-crypto') const util = require('./util') const CMS = require('./cms') const DS = require('interface-datastore') @@ -32,7 +31,6 @@ const defaultOptions = { function validateKeyName (name) { if (!name) return false - return name === sanitize(name.trim()) } @@ -44,8 +42,9 @@ function validateKeyName (name) { * * @param {function(Error)} callback - The caller * @param {string | Error} err - The error + * @returns {undefined} */ -function _error(callback, err) { +function _error (callback, err) { const min = 200 const max = 1000 const delay = Math.random() * (max - min) + min @@ -55,6 +54,9 @@ function _error(callback, err) { /** * Converts a key name into a datastore name. + * + * @param {string} name + * @returns {DS.Key} */ function DsName (name) { return new DS.Key('/' + name) @@ -62,8 +64,11 @@ function DsName (name) { /** * Converts a datastore name into a key name. + * + * @param {DS.Key} name - A datastore name + * @returns {string} */ -function KsName(name) { +function KsName (name) { return name.toString().slice(1) } @@ -111,7 +116,7 @@ class Keychain { this.cms = new CMS(this) } - static get options() { + static get options () { return defaultOptions } @@ -123,6 +128,7 @@ class Keychain { } const dsname = DsName(name) self.store.has(dsname, (err, exists) => { + if (err) return _error(callback, err) if (exists) return _error(callback, `Key '${name}' already exists'`) switch (type.toLowerCase()) { @@ -133,14 +139,14 @@ class Keychain { forge.pki.rsa.generateKeyPair({bits: size, workers: -1}, (err, keypair) => { if (err) return _error(callback, err) - const pem = forge.pki.encryptRsaPrivateKey(keypair.privateKey, this._()); + const pem = forge.pki.encryptRsaPrivateKey(keypair.privateKey, this._()) return self.store.put(dsname, pem, (err) => { if (err) return _error(callback, err) self._getKeyInfo(name, callback) }) }) - break; + break default: return _error(callback, `Invalid key type '${type}'`) @@ -181,13 +187,14 @@ class Keychain { } const dsname = DsName(name) self.store.has(dsname, (err, exists) => { + if (err) return _error(callback, err) if (!exists) return _error(callback, `Key '${name}' does not exist'`) self.store.delete(dsname, callback) }) } - renameKey(oldName, newName, callback) { + renameKey (oldName, newName, callback) { const self = this if (!validateKeyName(oldName) || oldName === 'self') { return _error(callback, `Invalid old key name '${oldName}'`) @@ -203,6 +210,7 @@ class Keychain { } const pem = res.toString() self.store.has(newDsname, (err, exists) => { + if (err) return _error(callback, err) if (exists) return _error(callback, `Key '${newName}' already exists'`) const batch = self.store.batch() @@ -246,7 +254,7 @@ class Keychain { }) } - importKey(name, pem, password, callback) { + importKey (name, pem, password, callback) { const self = this if (!validateKeyName(name) || name 
=== 'self') { return _error(callback, `Invalid key name '${name}'`) @@ -256,15 +264,16 @@ class Keychain { } const dsname = DsName(name) self.store.has(dsname, (err, exists) => { + if (err) return _error(callback, err) if (exists) return _error(callback, `Key '${name}' already exists'`) try { const privateKey = forge.pki.decryptRsaPrivateKey(pem, password) if (privateKey === null) { return _error(callback, 'Cannot read the key, most likely the password is wrong') } - const newpem = forge.pki.encryptRsaPrivateKey(privateKey, this._()); + const newpem = forge.pki.encryptRsaPrivateKey(privateKey, this._()) return self.store.put(dsname, newpem, (err) => { - if (err) return _error(callback, err) + if (err) return _error(callback, err) this._getKeyInfo(name, callback) }) @@ -280,23 +289,25 @@ class Keychain { return _error(callback, `Invalid key name '${name}'`) } if (!peer || !peer.privKey) { - return _error(callback, 'Peer.privKey \is required') + return _error(callback, 'Peer.privKey is required') } const dsname = DsName(name) self.store.has(dsname, (err, exists) => { + if (err) return _error(callback, err) if (exists) return _error(callback, `Key '${name}' already exists'`) const privateKeyProtobuf = peer.marshalPrivKey() - libp2pCrypto.keys.unmarshalPrivateKey(privateKeyProtobuf, (err, key) => { + crypto.keys.unmarshalPrivateKey(privateKeyProtobuf, (err, key) => { + if (err) return _error(callback, err) try { const der = key.marshal() - const buf = forge.util.createBuffer(der.toString('binary')); + const buf = forge.util.createBuffer(der.toString('binary')) const obj = forge.asn1.fromDer(buf) const privateKey = forge.pki.privateKeyFromAsn1(obj) if (privateKey === null) { return _error(callback, 'Cannot read the peer private key') } - const pem = forge.pki.encryptRsaPrivateKey(privateKey, this._()); + const pem = forge.pki.encryptRsaPrivateKey(privateKey, this._()) return self.store.put(dsname, pem, (err) => { if (err) return _error(callback, err) @@ -314,9 +325,9 @@ class Keychain { * * @param {string} name * @param {function(Error, string)} callback + * @returns {undefined} */ _getPrivateKey (name, callback) { - const self = this if (!validateKeyName(name)) { return _error(callback, `Invalid key name '${name}'`) } @@ -329,7 +340,6 @@ class Keychain { } _getKeyInfo (name, callback) { - const self = this if (!validateKeyName(name)) { return _error(callback, `Invalid key name '${name}'`) } @@ -356,7 +366,6 @@ class Keychain { } }) } - } module.exports = Keychain diff --git a/src/util.js b/src/util.js index c3cd5a1ff3..6066c33f2e 100644 --- a/src/util.js +++ b/src/util.js @@ -14,7 +14,7 @@ exports.keyId = (privateKey, callback) => { try { const publicKey = pki.setRsaPublicKey(privateKey.n, privateKey.e) const spki = pki.publicKeyToSubjectPublicKeyInfo(publicKey) - const der = new Buffer(forge.asn1.toDer(spki).getBytes(), 'binary') + const der = Buffer.from(forge.asn1.toDer(spki).getBytes(), 'binary') const jwk = rsaUtils.pkixToJwk(der) const rsa = new rsaClass.RsaPublicKey(jwk) rsa.hash((err, kid) => { @@ -33,12 +33,12 @@ exports.certificateForKey = (privateKey, callback) => { if (err) return callback(err) const publicKey = pki.setRsaPublicKey(privateKey.n, privateKey.e) - const cert = pki.createCertificate(); - cert.publicKey = publicKey; - cert.serialNumber = '01'; - cert.validity.notBefore = new Date(); - cert.validity.notAfter = new Date(); - cert.validity.notAfter.setFullYear(cert.validity.notBefore.getFullYear() + 10); + const cert = pki.createCertificate() + cert.publicKey = publicKey + 
cert.serialNumber = '01' + cert.validity.notBefore = new Date() + cert.validity.notAfter = new Date() + cert.validity.notAfter.setFullYear(cert.validity.notBefore.getFullYear() + 10) var attrs = [{ name: 'organizationName', value: 'ipfs' @@ -48,9 +48,9 @@ exports.certificateForKey = (privateKey, callback) => { }, { name: 'commonName', value: kid - }]; - cert.setSubject(attrs); - cert.setIssuer(attrs); + }] + cert.setSubject(attrs) + cert.setIssuer(attrs) cert.setExtensions([{ name: 'basicConstraints', cA: true @@ -77,7 +77,7 @@ exports.certificateForKey = (privateKey, callback) => { sslCA: true, emailCA: true, objCA: true - }]); + }]) // self-sign certificate cert.sign(privateKey) diff --git a/test/browser.js b/test/browser.js index a2633bef73..4e08b1375a 100644 --- a/test/browser.js +++ b/test/browser.js @@ -4,12 +4,9 @@ const async = require('async') const LevelStore = require('datastore-level') -// use in the browser with level.js -const browserStore = new LevelStore('my/db/name', {db: require('level-js')}) - describe('browser', () => { - const datastore1 = new LevelStore('test-keystore-1', {db: require('level-js')}) - const datastore2 = new LevelStore('test-keystore-2', {db: require('level-js')}) + const datastore1 = new LevelStore('test-keystore-1', {db: require('level-js')}) + const datastore2 = new LevelStore('test-keystore-2', {db: require('level-js')}) before((done) => { async.series([ diff --git a/test/keychain.spec.js b/test/keychain.spec.js index cc1048cfe1..25cae12256 100644 --- a/test/keychain.spec.js +++ b/test/keychain.spec.js @@ -1,3 +1,4 @@ +/* eslint max-nested-callbacks: ["error", 8] */ /* eslint-env mocha */ 'use strict' @@ -28,12 +29,12 @@ module.exports = (datastore1, datastore2) => { expect(() => new Keychain(datastore2)).to.throw() }) - it ('needs a NIST SP 800-132 non-weak pass phrase', () => { - expect(() => new Keychain(datastore2, { passPhrase: '< 20 character'})).to.throw() + it('needs a NIST SP 800-132 non-weak pass phrase', () => { + expect(() => new Keychain(datastore2, { passPhrase: '< 20 character' })).to.throw() }) it('needs a store to persist a key', () => { - expect(() => new Keychain(null, { passPhrase: passPhrase})).to.throw() + expect(() => new Keychain(null, { passPhrase: passPhrase })).to.throw() }) it('has default options', () => { @@ -112,7 +113,6 @@ module.exports = (datastore1, datastore2) => { }) }) }) - }) describe('query', () => { @@ -177,7 +177,7 @@ module.exports = (datastore1, datastore2) => { }) it('is a PKCS #7 message', (done) => { - ks.cms.readData("not CMS", (err) => { + ks.cms.readData('not CMS', (err) => { expect(err).to.exist() done() }) @@ -205,7 +205,6 @@ module.exports = (datastore1, datastore2) => { done() }) }) - }) describe('exported key', () => { @@ -243,7 +242,7 @@ module.exports = (datastore1, datastore2) => { done() }) }) - }) + }) describe('peer id', () => { const alicePrivKey = 
'CAASpgkwggSiAgEAAoIBAQC2SKo/HMFZeBml1AF3XijzrxrfQXdJzjePBZAbdxqKR1Mc6juRHXij6HXYPjlAk01BhF1S3Ll4Lwi0cAHhggf457sMg55UWyeGKeUv0ucgvCpBwlR5cQ020i0MgzjPWOLWq1rtvSbNcAi2ZEVn6+Q2EcHo3wUvWRtLeKz+DZSZfw2PEDC+DGPJPl7f8g7zl56YymmmzH9liZLNrzg/qidokUv5u1pdGrcpLuPNeTODk0cqKB+OUbuKj9GShYECCEjaybJDl9276oalL9ghBtSeEv20kugatTvYy590wFlJkkvyl+nPxIH0EEYMKK9XRWlu9XYnoSfboiwcv8M3SlsjAgMBAAECggEAZtju/bcKvKFPz0mkHiaJcpycy9STKphorpCT83srBVQi59CdFU6Mj+aL/xt0kCPMVigJw8P3/YCEJ9J+rS8BsoWE+xWUEsJvtXoT7vzPHaAtM3ci1HZd302Mz1+GgS8Epdx+7F5p80XAFLDUnELzOzKftvWGZmWfSeDnslwVONkL/1VAzwKy7Ce6hk4SxRE7l2NE2OklSHOzCGU1f78ZzVYKSnS5Ag9YrGjOAmTOXDbKNKN/qIorAQ1bovzGoCwx3iGIatQKFOxyVCyO1PsJYT7JO+kZbhBWRRE+L7l+ppPER9bdLFxs1t5CrKc078h+wuUr05S1P1JjXk68pk3+kQKBgQDeK8AR11373Mzib6uzpjGzgNRMzdYNuExWjxyxAzz53NAR7zrPHvXvfIqjDScLJ4NcRO2TddhXAfZoOPVH5k4PJHKLBPKuXZpWlookCAyENY7+Pd55S8r+a+MusrMagYNljb5WbVTgN8cgdpim9lbbIFlpN6SZaVjLQL3J8TWH6wKBgQDSChzItkqWX11CNstJ9zJyUE20I7LrpyBJNgG1gtvz3ZMUQCn3PxxHtQzN9n1P0mSSYs+jBKPuoSyYLt1wwe10/lpgL4rkKWU3/m1Myt0tveJ9WcqHh6tzcAbb/fXpUFT/o4SWDimWkPkuCb+8j//2yiXk0a/T2f36zKMuZvujqQKBgC6B7BAQDG2H2B/ijofp12ejJU36nL98gAZyqOfpLJ+FeMz4TlBDQ+phIMhnHXA5UkdDapQ+zA3SrFk+6yGk9Vw4Hf46B+82SvOrSbmnMa+PYqKYIvUzR4gg34rL/7AhwnbEyD5hXq4dHwMNsIDq+l2elPjwm/U9V0gdAl2+r50HAoGALtsKqMvhv8HucAMBPrLikhXP/8um8mMKFMrzfqZ+otxfHzlhI0L08Bo3jQrb0Z7ByNY6M8epOmbCKADsbWcVre/AAY0ZkuSZK/CaOXNX/AhMKmKJh8qAOPRY02LIJRBCpfS4czEdnfUhYV/TYiFNnKRj57PPYZdTzUsxa/yVTmECgYBr7slQEjb5Onn5mZnGDh+72BxLNdgwBkhO0OCdpdISqk0F0Pxby22DFOKXZEpiyI9XYP1C8wPiJsShGm2yEwBPWXnrrZNWczaVuCbXHrZkWQogBDG3HGXNdU4MAWCyiYlyinIBpPpoAJZSzpGLmWbMWh28+RJS6AQX6KHrK1o2uw==' @@ -252,6 +251,7 @@ module.exports = (datastore1, datastore2) => { before(function (done) { const encoded = Buffer.from(alicePrivKey, 'base64') PeerId.createFromPrivKey(encoded, (err, id) => { + expect(err).to.not.exist() alice = id done() }) @@ -351,6 +351,5 @@ module.exports = (datastore1, datastore2) => { }) }) }) - }) } diff --git a/test/peerid.js b/test/peerid.js index 8d3063c437..7d6588cb85 100644 --- a/test/peerid.js +++ b/test/peerid.js @@ -24,6 +24,7 @@ describe('peer ID', () => { before(function (done) { const encoded = Buffer.from(sample.privKey, 'base64') PeerId.createFromPrivKey(encoded, (err, id) => { + expect(err).to.not.exist() peer = id done() }) @@ -44,6 +45,7 @@ describe('peer ID', () => { // get protobuf version of the private key const privateKeyProtobuf = peer.marshalPrivKey() crypto.keys.unmarshalPrivateKey(privateKeyProtobuf, (err, key) => { + expect(err).to.not.exist() // console.log('private key', key) // console.log('\nprivate key der', key.marshal().toString('base64')) done() @@ -56,6 +58,7 @@ describe('peer ID', () => { const rsa = new rsaClass.RsaPublicKey(jwk) // console.log('rsa', rsa) rsa.hash((err, keyId) => { + expect(err).to.not.exist() // console.log('err', err) // console.log('keyId', keyId) // console.log('id decoded', multihash.decode(keyId)) @@ -78,6 +81,7 @@ describe('peer ID', () => { const rsa = new rsaClass.RsaPublicKey(jwk) // console.log('rsa', rsa) rsa.hash((err, keyId) => { + expect(err).to.not.exist() // console.log('err', err) // console.log('keyId', keyId) // console.log('id decoded', multihash.decode(keyId)) @@ -96,10 +100,10 @@ describe('peer ID', () => { // get protobuf version of the private key const privateKeyProtobuf = peer.marshalPrivKey() crypto.keys.unmarshalPrivateKey(privateKeyProtobuf, (err, key) => { + expect(err).to.not.exist() // console.log('private key', key) - //console.log('\nprivate key der', key.marshal().toString('base64')) + // 
console.log('\nprivate key der', key.marshal().toString('base64')) done() }) }) - }) From 98ba68ac8245667578a83de621df4077d7d99529 Mon Sep 17 00:00:00 2001 From: Richard Schneider Date: Thu, 7 Dec 2017 00:24:39 +1300 Subject: [PATCH 007/102] test: needs more time to generate RSA key --- test/keychain.spec.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/keychain.spec.js b/test/keychain.spec.js index 25cae12256..4f526ace9f 100644 --- a/test/keychain.spec.js +++ b/test/keychain.spec.js @@ -68,7 +68,7 @@ module.exports = (datastore1, datastore2) => { describe('key', () => { it('can be an RSA key', function (done) { - this.timeout(20 * 1000) + this.timeout(50 * 1000) ks.createKey(rsaKeyName, 'rsa', 2048, (err, info) => { expect(err).to.not.exist() expect(info).exist() From 569f96342e8653e21eae101342a26dbf9791021f Mon Sep 17 00:00:00 2001 From: Richard Schneider Date: Thu, 7 Dec 2017 00:51:30 +1300 Subject: [PATCH 008/102] test: temporarily disable webworker tests #3 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 91167499bd..88b066a9e9 100644 --- a/package.json +++ b/package.json @@ -8,7 +8,7 @@ "build": "aegir build", "test": "aegir test", "test:node": "aegir test -t node", - "test:browser": "aegir test -t browser -t webworker", + "test:browser": "aegir test -t browser", "release": "aegir release", "release-minor": "aegir release --type minor", "release-major": "aegir release --type major" From 358c8c2ea1d83a60e68cc462b4383e6a60bd8bb8 Mon Sep 17 00:00:00 2001 From: Richard Schneider Date: Thu, 7 Dec 2017 01:06:51 +1300 Subject: [PATCH 009/102] test: disable webworker --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 88b066a9e9..e382c95c92 100644 --- a/package.json +++ b/package.json @@ -6,7 +6,7 @@ "scripts": { "lint": "aegir lint", "build": "aegir build", - "test": "aegir test", + "test": "aegir test -t node -t browser", "test:node": "aegir test -t node", "test:browser": "aegir test -t browser", "release": "aegir release", From 99780ab38ab4c7d7b7557e11b3d4495c34d2c2ba Mon Sep 17 00:00:00 2001 From: Richard Schneider Date: Thu, 7 Dec 2017 01:22:35 +1300 Subject: [PATCH 010/102] chore: ci coverage Fixes #2 --- package.json | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index e382c95c92..dba6e53367 100644 --- a/package.json +++ b/package.json @@ -11,7 +11,9 @@ "test:browser": "aegir test -t browser", "release": "aegir release", "release-minor": "aegir release --type minor", - "release-major": "aegir release --type major" + "release-major": "aegir release --type major", + "coverage": "aegir coverage", + "coverage-publish": "aegir-coverage publish" }, "pre-commit": [ "lint", From cfdd2f47bf5cdb83332c1fda2cd3c37d61d300df Mon Sep 17 00:00:00 2001 From: Richard Schneider Date: Thu, 7 Dec 2017 01:34:29 +1300 Subject: [PATCH 011/102] chore: publish coverage report --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index dba6e53367..f90524fe40 100644 --- a/package.json +++ b/package.json @@ -13,7 +13,7 @@ "release-minor": "aegir release --type minor", "release-major": "aegir release --type major", "coverage": "aegir coverage", - "coverage-publish": "aegir-coverage publish" + "coverage-publish": "aegir coverage publish" }, "pre-commit": [ "lint", From 643bcd4eb2fc426bc97f9293a41d0377142dcd3a Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Maciej=20Kr=C3=BCger?= Date: Wed, 6 Dec 2017 13:40:12 +0100 Subject: [PATCH 012/102] Add syntax highlighting to README --- README.md | 24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index 90ade224ae..de4b333022 100644 --- a/README.md +++ b/README.md @@ -29,18 +29,22 @@ ## Install - npm install --save libp2p-keychain +```sh +npm install --save libp2p-keychain +``` ### Usage - const Keychain = require('libp2p-keychain') - const FsStore = require('datastore-fs') +```js +const Keychain = require('libp2p-keychain') +const FsStore = require('datastore-fs') - const datastore = new FsStore('./a-keystore') - const opts = { - passPhrase: 'some long easily remembered phrase' - } - const keychain = new Keychain(datastore, opts) +const datastore = new FsStore('./a-keystore') +const opts = { + passPhrase: 'some long easily remembered phrase' +} +const keychain = new Keychain(datastore, opts) +``` ## API @@ -68,7 +72,7 @@ Cryptographically protected messages The key management and naming service API all return a `KeyInfo` object. The `id` is a universally unique identifier for the key. The `name` is local to the key chain. -``` +```js { name: 'rsa-key', id: 'QmYWYSUZ4PV6MRFYpdtEDJBiGs4UrmE6g8wmAWSePekXVW' @@ -82,7 +86,7 @@ The **key id** is the SHA-256 [multihash](https://github.com/multiformats/multih A private key is stored as an encrypted PKCS 8 structure in the PEM format. It is protected by a key generated from the key chain's *passPhrase* using **PBKDF2**. Its file extension is `.p8`. The default options for generating the derived encryption key are in the `dek` object -``` +```js const defaultOptions = { createIfNeeded: true, From f49e753801851758b5d2fa1240e2d1b0c7d4661f Mon Sep 17 00:00:00 2001 From: Richard Schneider Date: Fri, 8 Dec 2017 14:45:02 +1300 Subject: [PATCH 013/102] fix: return info on removed key #10 --- src/keychain.js | 9 +++++---- test/keychain.spec.js | 5 ++++- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/src/keychain.js b/src/keychain.js index f4e4b0fcd6..e71cfa315d 100644 --- a/src/keychain.js +++ b/src/keychain.js @@ -186,11 +186,12 @@ class Keychain { return _error(callback, `Invalid key name '${name}'`) } const dsname = DsName(name) - self.store.has(dsname, (err, exists) => { + self._getKeyInfo(name, (err, keyinfo) => { if (err) return _error(callback, err) - if (!exists) return _error(callback, `Key '${name}' does not exist'`) - - self.store.delete(dsname, callback) + self.store.delete(dsname, (err) => { + if (err) return _error(callback, err) + callback(null, keyinfo) + }) }) } diff --git a/test/keychain.spec.js b/test/keychain.spec.js index 4f526ace9f..d0b61bcc20 100644 --- a/test/keychain.spec.js +++ b/test/keychain.spec.js @@ -345,8 +345,11 @@ module.exports = (datastore1, datastore2) => { }) it('can remove a known key', (done) => { - ks.removeKey(renamedRsaKeyName, (err) => { + ks.removeKey(renamedRsaKeyName, (err, key) => { expect(err).to.not.exist() + expect(key).to.exist() + expect(key).to.have.property('name', renamedRsaKeyName) + expect(key).to.have.property('id', rsaKeyInfo.id) done() }) }) From 8305d209b2f9ee23c996a5ee70513b324f6d495c Mon Sep 17 00:00:00 2001 From: Richard Schneider Date: Fri, 8 Dec 2017 14:46:38 +1300 Subject: [PATCH 014/102] fix: error message --- src/keychain.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/keychain.js b/src/keychain.js index e71cfa315d..20421e9ebc 100644 --- a/src/keychain.js +++ b/src/keychain.js @@ 
-129,7 +129,7 @@ class Keychain { const dsname = DsName(name) self.store.has(dsname, (err, exists) => { if (err) return _error(callback, err) - if (exists) return _error(callback, `Key '${name}' already exists'`) + if (exists) return _error(callback, `Key '${name}' already exists`) switch (type.toLowerCase()) { case 'rsa': @@ -212,7 +212,7 @@ class Keychain { const pem = res.toString() self.store.has(newDsname, (err, exists) => { if (err) return _error(callback, err) - if (exists) return _error(callback, `Key '${newName}' already exists'`) + if (exists) return _error(callback, `Key '${newName}' already exists`) const batch = self.store.batch() batch.put(newDsname, pem) @@ -266,7 +266,7 @@ class Keychain { const dsname = DsName(name) self.store.has(dsname, (err, exists) => { if (err) return _error(callback, err) - if (exists) return _error(callback, `Key '${name}' already exists'`) + if (exists) return _error(callback, `Key '${name}' already exists`) try { const privateKey = forge.pki.decryptRsaPrivateKey(pem, password) if (privateKey === null) { @@ -295,7 +295,7 @@ class Keychain { const dsname = DsName(name) self.store.has(dsname, (err, exists) => { if (err) return _error(callback, err) - if (exists) return _error(callback, `Key '${name}' already exists'`) + if (exists) return _error(callback, `Key '${name}' already exists`) const privateKeyProtobuf = peer.marshalPrivKey() crypto.keys.unmarshalPrivateKey(privateKeyProtobuf, (err, key) => { From 3b8d05abb81a6eb58d63c66afc64eed78bdf8551 Mon Sep 17 00:00:00 2001 From: Richard Schneider Date: Sat, 9 Dec 2017 20:37:00 +1300 Subject: [PATCH 015/102] docs(keychain): add API documentation --- .travis.yml | 4 --- src/keychain.js | 84 +++++++++++++++++++++++++++++++++++++++++++++++-- 2 files changed, 82 insertions(+), 6 deletions(-) diff --git a/.travis.yml b/.travis.yml index 584f308f81..af201abaec 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,16 +13,12 @@ matrix: script: - npm run lint - npm run test - - npm run coverage - make test before_script: - export DISPLAY=:99.0 - sh -e /etc/init.d/xvfb start -after_success: - - npm run coverage-publish - addons: firefox: 'latest' apt: diff --git a/src/keychain.js b/src/keychain.js index 20421e9ebc..e4a61d7102 100644 --- a/src/keychain.js +++ b/src/keychain.js @@ -43,6 +43,7 @@ function validateKeyName (name) { * @param {function(Error)} callback - The caller * @param {string | Error} err - The error * @returns {undefined} + * @private */ function _error (callback, err) { const min = 200 @@ -57,6 +58,7 @@ function _error (callback, err) { * * @param {string} name * @returns {DS.Key} + * @private */ function DsName (name) { return new DS.Key('/' + name) @@ -67,12 +69,31 @@ function DsName (name) { * * @param {DS.Key} name - A datastore name * @returns {string} + * @private */ function KsName (name) { return name.toString().slice(1) } +/** + * Information about a key. + * + * @typedef {Object} KeyInfo + * + * @property {string} id - The universally unique key id. + * @property {string} name - The local key name. + */ + +/** + * Key management + */ class Keychain { + /** + * Creates a new instance of a key chain. + * + * @param {DS} store - where the key are. + * @param {object} options - ??? + */ constructor (store, options) { if (!store) { throw new Error('store is required') @@ -116,10 +137,24 @@ class Keychain { this.cms = new CMS(this) } + /** + * The default options for a keychain. + * + * @returns {object} + */ static get options () { return defaultOptions } + /** + * Create a new key. 
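+   *
+   * A usage sketch (the key name and callback body here are illustrative only):
+   *
+   *   keychain.createKey('app-rsa', 'rsa', 2048, (err, info) => {
+   *     if (err) throw err
+   *     // info is a KeyInfo: { name: 'app-rsa', id: '<multihash of the public key>' }
+   *   })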
+ * + * @param {string} name - The local key name; cannot already exist. + * @param {string} type - One of the key types; 'rsa'. + * @param {int} size - The key size in bits. + * @param {function(Error, KeyInfo)} callback + * @returns {undefined} + */ createKey (name, type, size, callback) { const self = this @@ -154,6 +189,12 @@ class Keychain { }) } + /** + * List all the keys. + * + * @param {function(Error, KeyInfo[])} callback + * @returns {undefined} + */ listKeys (callback) { const self = this const query = { @@ -170,8 +211,15 @@ class Keychain { ) } - // TODO: not very efficent. + /** + * Find a key by it's name. + * + * @param {string} id - The universally unique key identifier. + * @param {function(Error, KeyInfo)} callback + * @returns {undefined} + */ findKeyById (id, callback) { + // TODO: not very efficent. this.listKeys((err, keys) => { if (err) return _error(callback, err) @@ -180,6 +228,13 @@ class Keychain { }) } + /** + * Remove an existing key. + * + * @param {string} name - The local key name; must already exist. + * @param {function(Error, KeyInfo)} callback + * @returns {undefined} + */ removeKey (name, callback) { const self = this if (!validateKeyName(name) || name === 'self') { @@ -195,6 +250,14 @@ class Keychain { }) } + /** + * Rename a key + * + * @param {string} oldName - The old local key name; must already exist. + * @param {string} newName - The new local key name; must not already exist. + * @param {function(Error, KeyInfo)} callback + * @returns {undefined} + */ renameKey (oldName, newName, callback) { const self = this if (!validateKeyName(oldName) || oldName === 'self') { @@ -225,6 +288,14 @@ class Keychain { }) } + /** + * Export an existing key as a PEM encrypted PKCS #8 string + * + * @param {string} name - The local key name; must already exist. + * @param {string} password - The password + * @param {function(Error, string)} callback + * @returns {undefined} + */ exportKey (name, password, callback) { if (!validateKeyName(name)) { return _error(callback, `Invalid key name '${name}'`) @@ -255,6 +326,15 @@ class Keychain { }) } + /** + * Import a new key from a PEM encoded PKCS #8 string + * + * @param {string} name - The local key name; must not already exist. + * @param {string} pem - The PEM encoded PKCS #8 string + * @param {string} password - The password. + * @param {function(Error, KeyInfo)} callback + * @returns {undefined} + */ importKey (name, pem, password, callback) { const self = this if (!validateKeyName(name) || name === 'self') { @@ -322,7 +402,7 @@ class Keychain { } /** - * Gets the private key as PEM encoded PKCS #8 + * Gets the private key as PEM encoded PKCS #8 string. * * @param {string} name * @param {function(Error, string)} callback From f71d3a652186c364a1fc7af518540d316d70abf5 Mon Sep 17 00:00:00 2001 From: Richard Schneider Date: Sun, 10 Dec 2017 17:19:20 +1300 Subject: [PATCH 016/102] fix: maps an IPFS hash name to its forge equivalent Fixes #12 --- src/keychain.js | 22 ++++++++++++++++++++-- test/keychain.spec.js | 6 ++++++ 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/src/keychain.js b/src/keychain.js index e4a61d7102..7f9f508555 100644 --- a/src/keychain.js +++ b/src/keychain.js @@ -19,13 +19,26 @@ const NIST = { minIterationCount: 1000 } +/** + * Maps an IPFS hash name to its forge equivalent. 
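+ *
+ * The `dek.hash` option is supplied as a multihash name (for example 'sha2-512')
+ * and is translated here to the equivalent forge name ('sha512') before PBKDF2
+ * derives the encryption key.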
+ * + * See https://github.com/multiformats/multihash/blob/master/hashtable.csv + * + * @private + */ +const hashName2Forge = { + 'sha1': 'sha1', + 'sha2-256': 'sha256', + 'sha2-512': 'sha512', + +} const defaultOptions = { // See https://cryptosense.com/parametesr-choice-for-pbkdf2/ dek: { keyLength: 512 / 8, iterationCount: 10000, salt: 'you should override this value with a crypto secure random number', - hash: 'sha512' + hash: 'sha2-512' } } @@ -120,13 +133,18 @@ class Keychain { } this.dek = opts.dek + // Get the hashing alogorithm + const hashAlgorithm = hashName2Forge[opts.dek.hash] + if (!hashAlgorithm) + throw new Error(`dek.hash '${opts.dek.hash}' is unknown or not supported`) + // Create the derived encrypting key let dek = forge.pkcs5.pbkdf2( opts.passPhrase, opts.dek.salt, opts.dek.iterationCount, opts.dek.keyLength, - opts.dek.hash) + hashAlgorithm) dek = forge.util.bytesToHex(dek) Object.defineProperty(this, '_', { value: () => dek }) diff --git a/test/keychain.spec.js b/test/keychain.spec.js index d0b61bcc20..75a9f5feba 100644 --- a/test/keychain.spec.js +++ b/test/keychain.spec.js @@ -41,6 +41,12 @@ module.exports = (datastore1, datastore2) => { expect(Keychain.options).to.exist() }) + it('needs a supported hashing alorithm', () => { + const ok = new Keychain(datastore2, { passPhrase: passPhrase, dek: { hash: 'sha2-256' } }) + expect(ok).to.exist() + expect(() => new Keychain(datastore2, { passPhrase: passPhrase, dek: { hash: 'my-hash' } })).to.throw() + }) + describe('key name', () => { it('is a valid filename and non-ASCII', () => { ks.removeKey('../../nasty', (err) => { From ff4f6562483b84577d326696adaab9a186be2c56 Mon Sep 17 00:00:00 2001 From: Richard Schneider Date: Sun, 10 Dec 2017 17:21:26 +1300 Subject: [PATCH 017/102] fix: lint errors --- src/keychain.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/keychain.js b/src/keychain.js index 7f9f508555..fd3409e308 100644 --- a/src/keychain.js +++ b/src/keychain.js @@ -29,9 +29,9 @@ const NIST = { const hashName2Forge = { 'sha1': 'sha1', 'sha2-256': 'sha256', - 'sha2-512': 'sha512', - + 'sha2-512': 'sha512' } + const defaultOptions = { // See https://cryptosense.com/parametesr-choice-for-pbkdf2/ dek: { From 06917f7aba347a910e6054c7b5683a363100d817 Mon Sep 17 00:00:00 2001 From: Richard Schneider Date: Sun, 10 Dec 2017 17:37:16 +1300 Subject: [PATCH 018/102] fix: lint errors --- src/keychain.js | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/keychain.js b/src/keychain.js index fd3409e308..789928b1a0 100644 --- a/src/keychain.js +++ b/src/keychain.js @@ -27,7 +27,7 @@ const NIST = { * @private */ const hashName2Forge = { - 'sha1': 'sha1', + sha1: 'sha1', 'sha2-256': 'sha256', 'sha2-512': 'sha512' } @@ -135,8 +135,9 @@ class Keychain { // Get the hashing alogorithm const hashAlgorithm = hashName2Forge[opts.dek.hash] - if (!hashAlgorithm) + if (!hashAlgorithm) { throw new Error(`dek.hash '${opts.dek.hash}' is unknown or not supported`) + } // Create the derived encrypting key let dek = forge.pkcs5.pbkdf2( From 2dd069b05a0cd6b84bdb4c07b944a0d024c6758f Mon Sep 17 00:00:00 2001 From: Richard Schneider Date: Sun, 10 Dec 2017 21:21:10 +1300 Subject: [PATCH 019/102] test: importing openssl keys --- test/browser.js | 1 + test/node.js | 1 + test/openssl.js | 154 ++++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 156 insertions(+) create mode 100644 test/openssl.js diff --git a/test/browser.js b/test/browser.js index 4e08b1375a..9584a635c4 100644 --- 
a/test/browser.js +++ b/test/browser.js @@ -23,5 +23,6 @@ describe('browser', () => { }) require('./keychain.spec')(datastore1, datastore2) + require('./openssl')(datastore1) require('./peerid') }) diff --git a/test/node.js b/test/node.js index b003a7c8ba..634716cf10 100644 --- a/test/node.js +++ b/test/node.js @@ -30,5 +30,6 @@ describe('node', () => { }) require('./keychain.spec')(datastore1, datastore2) + require('./openssl')(datastore1) require('./peerid') }) diff --git a/test/openssl.js b/test/openssl.js new file mode 100644 index 0000000000..17865d7fdd --- /dev/null +++ b/test/openssl.js @@ -0,0 +1,154 @@ +/* eslint max-nested-callbacks: ["error", 8] */ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const Keychain = require('..') + +module.exports = (datastore1) => { + describe('interop with openssl', () => { + const passPhrase = 'this is not a secure phrase' + const keyName = 'openssl-key' + let ks + + before((done) => { + ks = new Keychain(datastore1, { passPhrase: passPhrase }) + done() + }) + + it('can read a private key', (done) => { + /* + * Generated with + * openssl genpkey -algorithm RSA + * -pkeyopt rsa_keygen_bits:3072 + * -pkeyopt rsa_keygen_pubexp:65537 + */ + const pem = `-----BEGIN PRIVATE KEY----- +MIIG/wIBADANBgkqhkiG9w0BAQEFAASCBukwggblAgEAAoIBgQDp0Whyqa8KmdvK +0MsQGJEBzDAEHAZc0C6cr0rkb6Xwo+yB5kjZBRDORk0UXtYGE1pYt4JhUTmMzcWO +v2xTIsdbVMQlNtput2U8kIqS1cSTkX5HxOJtCiIzntMzuR/bGPSOexkyFQ8nCUqb +ROS7cln/ixprra2KMAKldCApN3ue2jo/JI1gyoS8sekhOASAa0ufMPpC+f70sc75 +Y53VLnGBNM43iM/2lsK+GI2a13d6rRy86CEM/ygnh/EDlyNDxo+SQmy6GmSv/lmR +xgWQE2dIfK504KIxFTOphPAQAr9AsmcNnCQLhbz7YTsBz8WcytHGQ0Z5pnBQJ9AV +CX9E6DFHetvs0CNLVw1iEO06QStzHulmNEI/3P8I1TIxViuESJxSu3pSNwG1bSJZ ++Qee24vvlz/slBzK5gZWHvdm46v7vl5z7SA+whncEtjrswd8vkJk9fI/YTUbgOC0 +HWMdc2t/LTZDZ+LUSZ/b2n5trvdJSsOKTjEfuf0wICC08pUUk8MCAwEAAQKCAYEA +ywve+DQCneIezHGk5cVvp2/6ApeTruXalJZlIxsRr3eq2uNwP4X2oirKpPX2RjBo +NMKnpnsyzuOiu+Pf3hJFrTpfWzHXXm5Eq+OZcwnQO5YNY6XGO4qhSNKT9ka9Mzbo +qRKdPrCrB+s5rryVJXKYVSInP3sDSQ2IPsYpZ6GW6Mv56PuFCpjTzElzejV7M0n5 +0bRmn+MZVMVUR54KYiaCywFgUzmr3yfs1cfcsKqMRywt2J58lRy/chTLZ6LILQMv +4V01neVJiRkTmUfIWvc1ENIFM9QJlky9AvA5ASvwTTRz8yOnxoOXE/y4OVyOePjT +cz9eumu9N5dPuUIMmsYlXmRNaeGZPD9bIgKY5zOlfhlfZSuOLNH6EHBNr6JAgfwL +pdP43sbg2SSNKpBZ0iSMvpyTpbigbe3OyhnFH/TyhcC2Wdf62S9/FRsvjlRPbakW +YhKAA2kmJoydcUDO5ccEga8b7NxCdhRiczbiU2cj70pMIuOhDlGAznyxsYbtyxaB +AoHBAPy6Cbt6y1AmuId/HYfvms6i8B+/frD1CKyn+sUDkPf81xSHV7RcNrJi1S1c +V55I0y96HulsR+GmcAW1DF3qivWkdsd/b4mVkizd/zJm3/Dm8p8QOnNTtdWvYoEB +VzfAhBGaR/xflSLxZh2WE8ZHQ3IcRCXV9ZFgJ7PMeTprBJXzl0lTptvrHyo9QK1v +obLrL/KuXWS0ql1uSnJr1vtDI5uW8WU4GDENeU5b/CJHpKpjVxlGg+7pmLknxlBl +oBnZnQKBwQDs2Ky29qZ69qnPWowKceMJ53Z6uoUeSffRZ7xuBjowpkylasEROjuL +nyAihIYB7fd7R74CnRVYLI+O2qXfNKJ8HN+TgcWv8LudkRcnZDSvoyPEJAPyZGfr +olRCXD3caqtarlZO7vXSAl09C6HcL2KZ8FuPIEsuO0Aw25nESMg9eVMaIC6s2eSU +NUt6xfZw1JC0c+f0LrGuFSjxT2Dr5WKND9ageI6afuauMuosjrrOMl2g0dMcSnVz +KrtYa7Wi1N8CgcBFnuJreUplDCWtfgEen40f+5b2yAQYr4fyOFxGxdK73jVJ/HbW +wsh2n+9mDZg9jIZQ/+1gFGpA6V7W06dSf/hD70ihcKPDXSbloUpaEikC7jxMQWY4 +uwjOkwAp1bq3Kxu21a+bAKHO/H1LDTrpVlxoJQ1I9wYtRDXrvBpxU2XyASbeFmNT +FhSByFn27Ve4OD3/NrWXtoVwM5/ioX6ZvUcj55McdTWE3ddbFNACiYX9QlyOI/TY +bhWafDCPmU9fj6kCgcEAjyQEfi9jPj2FM0RODqH1zS6OdG31tfCOTYicYQJyeKSI +/hAezwKaqi9phHMDancfcupQ89Nr6vZDbNrIFLYC3W+1z7hGeabMPNZLYAs3rE60 +dv4tRHlaNRbORazp1iTBmvRyRRI2js3O++3jzOb2eILDUyT5St+UU/LkY7R5EG4a +w1df3idx9gCftXufDWHqcqT6MqFl0QgIzo5izS68+PPxitpRlR3M3Mr4rCU20Rev 
+blphdF+rzAavYyj1hYuRAoHBANmxwbq+QqsJ19SmeGMvfhXj+T7fNZQFh2F0xwb2 +rMlf4Ejsnx97KpCLUkoydqAs2q0Ws9Nkx2VEVx5KfUD7fWhgbpdnEPnQkfeXv9sD +vZTuAoqInN1+vj1TME6EKR/6D4OtQygSNpecv23EuqEvyXWqRVsRt9Qd2B0H4k7h +gnjREs10u7zyqBIZH7KYVgyh27WxLr859ap8cKAH6Fb+UOPtZo3sUeeume60aebn +4pMwXeXP+LO8NIfRXV8mgrm86g== +-----END PRIVATE KEY----- +` + ks.importKey(keyName, pem, '', (err, key) => { + expect(err).to.not.exist() + expect(key).to.exist() + expect(key).to.have.property('name', keyName) + expect(key).to.have.property('id') + ks.removeKey(keyName, done) + }) + }) + + // TODO: net.forge can not cope with this + // Uncaught AssertionError: expected [Error: Cannot read encrypted PBE data block. Unsupported OID.] to not exist + it.skip('can read a private encrypted key (v1)', (done) => { + /* + * Generated with + * openssl genpkey -algorithm RSA + * -pkeyopt rsa_keygen_bits:1024 + * -pkeyopt rsa_keygen_pubexp:65537 + * -out foo.pem + * openssl pkcs8 -in foo.pem -topk8 -passout pass:mypassword + */ + const pem = `-----BEGIN ENCRYPTED PRIVATE KEY----- +MIICoTAbBgkqhkiG9w0BBQMwDgQI2563Jugj/KkCAggABIICgPxHkKtUUE8EWevq +eX9nTjqpbsv0QoXQMhegfxDELJLU8tj6V0bWNt7QDdfQ1n6FRgnNvNGick6gyqHH +yH9qC2oXwkDFP7OrHp2NEZd7DHQLLc+L4KJ/0dzsiZ1U9no7XzQMUay9Bc918ADE +pN2/EqigWkaG4gNjkAeKWr6+BNRevDXlSvls7YDboNcTiACi5zJkthivB9g3vT1m +gPdN6Gf/mmqtBTDHeqj5QsmXYqeCyo5b26JgYsziABVZDHph4ekPUsTvudRpE9Ex +baXwdYEAZxVpSbTvQ3A5qysjSZeM9ttfRTSSwL391q7dViz4+aujpk0Vj7piH+1B +CkfO8/XudRdRlnOe+KjMidktKCsMGCIOW92IlfMvIQ/Zn1GTYj9bRXONFNJ2WPND +UmCKnL7cmworwg/weRorrGKBWIGspU+tDASOPSvIGKo6Hoxm4CN1TpDRY7DAGlgm +Y3TEbMYfpXyzkPjvAhJDt03D3J9PrTO6uM5d7YUaaTmJ2TQFQVF2Lc3Uz8lDJLs0 +ZYtfQ/4H+YY2RrX7ua7t6ArUcYXZtv0J4lRYWjwV8fGPUVc0d8xLJU0Yjf4BD7K8 +rsavHo9b5YvBUX7SgUyxAEembEOe3SjQ+gPu2U5wovcjUuC9eItEEsXGrx30BQ0E +8BtK2+hp0eMkW5/BYckJkH+Yl8ypbzRGRRIZzLgeI4JveSx/mNhewfgTr+ORPThZ +mBdkD5r+ixWF174naw53L8U9wF8kiK7pIE1N9TR4USEeovLwX6Ni/2MMDZedOfof +2f77eUdLsK19/5/lcgAAYaXauXWhy2d2r3SayFrC9woy0lh2VLKRMBjcx1oWb7dp +0uxzo5Y= +-----END ENCRYPTED PRIVATE KEY----- +` + ks.importKey(keyName, pem, 'mypassword', (err, key) => { + expect(err).to.not.exist() + expect(key).to.exist() + expect(key).to.have.property('name', keyName) + expect(key).to.have.property('id') + ks.removeKey(keyName, done) + }) + }) + + it('can read a private encrypted key (v2)', (done) => { + /* + * Generated with + * openssl genpkey -algorithm RSA + * -pkeyopt rsa_keygen_bits:1024 + * -pkeyopt rsa_keygen_pubexp:65537 + * -out foo.pem + * openssl pkcs8 -in foo.pem -topk8 -v2 aes-256-cbc -passout pass:mypassword + */ + const pem = `-----BEGIN ENCRYPTED PRIVATE KEY----- +MIICzzBJBgkqhkiG9w0BBQ0wPDAbBgkqhkiG9w0BBQwwDgQIhuL894loRucCAggA +MB0GCWCGSAFlAwQBKgQQEoEtsjW3iC9/u0uGvkxX7wSCAoAsX3l6JoR2OGbT8CkY +YT3RQFqquOgItYOHw6E3tir2YrmxEAo99nxoL8pdto37KSC32eAGnfv5R1zmHHSx +0M3/y2AWiCBTX95EEzdtGC1hK3PBa/qpp/xEmcrsjYN6NXxMAkhC0hMP/HdvqMAg +ee7upvaYJsJcl8QLFNayAWr8b8cZA/RBhGEIRl59Eyj6nNtxDt3bCrfe06o1CPCV +50/fRZEwFOi/C6GYvPN6MrPZO3ALBWgopLT2yQqycTKtfxYWIdOsMBkAjKf2D6Pk +u2mqBsaP4b71jIIeT4euSJLsoJV+O39s8YHXtW8GtOqp7V5kIlnm90lZ9wzeLTZ7 +HJsD/jEdYto5J3YWm2wwEDccraffJSm7UDtJBvQdIx832kxeFCcGQjW38Zl1qqkg +iTH1PLTypxj2ZuviS2EkXVFb/kVU6leWwOt6fqWFC58UvJKeCk/6veazz3PDnTWM +92ClUqFd+CZn9VT4CIaJaAc6v5NLpPp+T9sRX9AtequPm7FyTeevY9bElfyk9gW9 +JDKgKxs6DGWDa16RL5vzwtU+G3o6w6IU+mEwa6/c+hN+pRFs/KBNLLSP9OHBx7BJ +X/32Ft+VFhJaK+lQ+f+hve7od/bgKnz4c/Vtp7Dh51DgWgCpBgb8p0vqu02vTnxD +BXtDv3h75l5PhvdWfVIzpMWRYFvPR+vJi066FjAz2sjYc0NMLSYtZWyWoIInjhoX +Dp5CQujCtw/ZSSlwde1DKEWAW4SeDZAOQNvuz0rU3eosNUJxEmh3aSrcrRtDpw+Y 
+mBUuWAZMpz7njBi7h+JDfmSW/GAaMwrVFC2gef5375R0TejAh+COAjItyoeYEvv8 +DQd8 +-----END ENCRYPTED PRIVATE KEY----- +` + ks.importKey(keyName, pem, 'mypassword', (err, key) => { + expect(err).to.not.exist() + expect(key).to.exist() + expect(key).to.have.property('name', keyName) + expect(key).to.have.property('id') + ks.removeKey(keyName, done) + }) + }) + }) +} From 1b2664a902742276ae94bd759aaa474d19d17cd6 Mon Sep 17 00:00:00 2001 From: Richard Schneider Date: Mon, 11 Dec 2017 14:25:54 +1300 Subject: [PATCH 020/102] refactor: keep the key info in the store --- src/keychain.js | 164 ++++++++++++++++++++++++++---------------- test/keychain.spec.js | 10 +++ 2 files changed, 111 insertions(+), 63 deletions(-) diff --git a/src/keychain.js b/src/keychain.js index 789928b1a0..004174d637 100644 --- a/src/keychain.js +++ b/src/keychain.js @@ -1,6 +1,5 @@ 'use strict' -const async = require('async') const sanitize = require('sanitize-filename') const forge = require('node-forge') const deepmerge = require('deepmerge') @@ -10,7 +9,8 @@ const CMS = require('./cms') const DS = require('interface-datastore') const pull = require('pull-stream') -const keyExtension = '.p8' +const keyPrefix = '/pkcs8/' +const infoPrefix = '/info/' // NIST SP 800-132 const NIST = { @@ -74,18 +74,18 @@ function _error (callback, err) { * @private */ function DsName (name) { - return new DS.Key('/' + name) + return new DS.Key(keyPrefix + name) } /** - * Converts a datastore name into a key name. + * Converts a key name into a datastore info name. * - * @param {DS.Key} name - A datastore name - * @returns {string} + * @param {string} name + * @returns {DS.Key} * @private */ -function KsName (name) { - return name.toString().slice(1) +function DsInfoName (name) { + return new DS.Key(infoPrefix + name) } /** @@ -98,7 +98,12 @@ function KsName (name) { */ /** - * Key management + * Manages the lifecycle of a key. Keys are encrypted at rest using PKCS #8. 
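+ *
+ * The stored PEM is protected by a key derived from the keychain's passPhrase
+ * using PBKDF2 with the `dek` options (salt, iteration count, key length and hash).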
+ * + * A key in the store has two entries + * - '/info/key-name', contains the KeyInfo for the key + * - '/pkcs8/key-name', contains the PKCS #8 for the key + * */ class Keychain { /** @@ -112,9 +117,6 @@ class Keychain { throw new Error('store is required') } this.store = store - if (this.store.opts) { - this.store.opts.extension = keyExtension - } const opts = deepmerge(defaultOptions, options) @@ -149,9 +151,6 @@ class Keychain { dek = forge.util.bytesToHex(dek) Object.defineProperty(this, '_', { value: () => dek }) - // JS magick - this._getKeyInfo = this.findKeyByName = this._getKeyInfo.bind(this) - // Provide access to protected messages this.cms = new CMS(this) } @@ -192,12 +191,22 @@ class Keychain { } forge.pki.rsa.generateKeyPair({bits: size, workers: -1}, (err, keypair) => { if (err) return _error(callback, err) - - const pem = forge.pki.encryptRsaPrivateKey(keypair.privateKey, this._()) - return self.store.put(dsname, pem, (err) => { + util.keyId(keypair.privateKey, (err, kid) => { if (err) return _error(callback, err) - self._getKeyInfo(name, callback) + const pem = forge.pki.encryptRsaPrivateKey(keypair.privateKey, this._()) + const keyInfo = { + name: name, + id: kid + } + const batch = self.store.batch() + batch.put(dsname, pem) + batch.put(DsInfoName(name), JSON.stringify(keyInfo)) + batch.commit((err) => { + if (err) return _error(callback, err) + + callback(null, keyInfo) + }) }) }) break @@ -217,28 +226,27 @@ class Keychain { listKeys (callback) { const self = this const query = { - keysOnly: true + prefix: infoPrefix } pull( self.store.query(query), pull.collect((err, res) => { if (err) return _error(callback, err) - const names = res.map(r => KsName(r.key)) - async.map(names, self._getKeyInfo, callback) + const info = res.map(r => JSON.parse(r.value)) + callback(null, info) }) ) } /** - * Find a key by it's name. + * Find a key by it's id. * * @param {string} id - The universally unique key identifier. * @param {function(Error, KeyInfo)} callback * @returns {undefined} */ findKeyById (id, callback) { - // TODO: not very efficent. this.listKeys((err, keys) => { if (err) return _error(callback, err) @@ -247,6 +255,28 @@ class Keychain { }) } + /** + * Find a key by it's name. + * + * @param {string} name - The local key name. + * @param {function(Error, KeyInfo)} callback + * @returns {undefined} + */ + findKeyByName (name, callback) { + if (!validateKeyName(name)) { + return _error(callback, `Invalid key name '${name}'`) + } + + const dsname = DsInfoName(name) + this.store.get(dsname, (err, res) => { + if (err) { + return _error(callback, `Key '${name}' does not exist. ${err.message}`) + } + + callback(null, JSON.parse(res.toString())) + }) + } + /** * Remove an existing key. * @@ -260,9 +290,12 @@ class Keychain { return _error(callback, `Invalid key name '${name}'`) } const dsname = DsName(name) - self._getKeyInfo(name, (err, keyinfo) => { + self.findKeyByName(name, (err, keyinfo) => { if (err) return _error(callback, err) - self.store.delete(dsname, (err) => { + const batch = self.store.batch() + batch.delete(dsname) + batch.delete(DsInfoName(name)) + batch.commit((err) => { if (err) return _error(callback, err) callback(null, keyinfo) }) @@ -287,6 +320,8 @@ class Keychain { } const oldDsname = DsName(oldName) const newDsname = DsName(newName) + const oldInfoName = DsInfoName(oldName) + const newInfoName = DsInfoName(newName) this.store.get(oldDsname, (err, res) => { if (err) { return _error(callback, `Key '${oldName}' does not exist. 
${err.message}`) @@ -296,12 +331,20 @@ class Keychain { if (err) return _error(callback, err) if (exists) return _error(callback, `Key '${newName}' already exists`) - const batch = self.store.batch() - batch.put(newDsname, pem) - batch.delete(oldDsname) - batch.commit((err) => { + self.store.get(oldInfoName, (err, res) => { if (err) return _error(callback, err) - self._getKeyInfo(newName, callback) + + const keyInfo = JSON.parse(res.toString()) + keyInfo.name = newName + const batch = self.store.batch() + batch.put(newDsname, pem) + batch.put(newInfoName, JSON.stringify(keyInfo)) + batch.delete(oldDsname) + batch.delete(oldInfoName) + batch.commit((err) => { + if (err) return _error(callback, err) + callback(null, keyInfo) + }) }) }) }) @@ -372,10 +415,21 @@ class Keychain { return _error(callback, 'Cannot read the key, most likely the password is wrong') } const newpem = forge.pki.encryptRsaPrivateKey(privateKey, this._()) - return self.store.put(dsname, newpem, (err) => { + util.keyId(privateKey, (err, kid) => { if (err) return _error(callback, err) - this._getKeyInfo(name, callback) + const keyInfo = { + name: name, + id: kid + } + const batch = self.store.batch() + batch.put(dsname, newpem) + batch.put(DsInfoName(name), JSON.stringify(keyInfo)) + batch.commit((err) => { + if (err) return _error(callback, err) + + callback(null, keyInfo) + }) }) } catch (err) { _error(callback, err) @@ -408,10 +462,21 @@ class Keychain { return _error(callback, 'Cannot read the peer private key') } const pem = forge.pki.encryptRsaPrivateKey(privateKey, this._()) - return self.store.put(dsname, pem, (err) => { + util.keyId(privateKey, (err, kid) => { if (err) return _error(callback, err) - this._getKeyInfo(name, callback) + const keyInfo = { + name: name, + id: kid + } + const batch = self.store.batch() + batch.put(dsname, pem) + batch.put(DsInfoName(name), JSON.stringify(keyInfo)) + batch.commit((err) => { + if (err) return _error(callback, err) + + callback(null, keyInfo) + }) }) } catch (err) { _error(callback, err) @@ -426,6 +491,7 @@ class Keychain { * @param {string} name * @param {function(Error, string)} callback * @returns {undefined} + * @private */ _getPrivateKey (name, callback) { if (!validateKeyName(name)) { @@ -438,34 +504,6 @@ class Keychain { callback(null, res.toString()) }) } - - _getKeyInfo (name, callback) { - if (!validateKeyName(name)) { - return _error(callback, `Invalid key name '${name}'`) - } - - const dsname = DsName(name) - this.store.get(dsname, (err, res) => { - if (err) { - return _error(callback, `Key '${name}' does not exist. 
${err.message}`) - } - const pem = res.toString() - try { - const privateKey = forge.pki.decryptRsaPrivateKey(pem, this._()) - util.keyId(privateKey, (err, kid) => { - if (err) return _error(callback, err) - - const info = { - name: name, - id: kid - } - return callback(null, info) - }) - } catch (e) { - _error(callback, e) - } - }) - } } module.exports = Keychain diff --git a/test/keychain.spec.js b/test/keychain.spec.js index 75a9f5feba..0883984df3 100644 --- a/test/keychain.spec.js +++ b/test/keychain.spec.js @@ -271,6 +271,16 @@ module.exports = (datastore1, datastore2) => { done() }) }) + + it('key exists', (done) => { + ks.findKeyByName('alice', (err, key) => { + expect(err).to.not.exist() + expect(key).to.exist() + expect(key).to.have.property('name', 'alice') + expect(key).to.have.property('id', alice.toB58String()) + done() + }) + }) }) describe('rename', () => { From ee9dbeb0119bfe0ed2500c8db8fc05a9d3fbfe9b Mon Sep 17 00:00:00 2001 From: Victor Bjelkholm Date: Thu, 14 Dec 2017 18:04:39 +0100 Subject: [PATCH 021/102] Updating CI files This commit updates all CI scripts to the latest version --- .travis.yml | 6 +++++- appveyor.yml | 25 ++++++++++++++----------- ci/Jenkinsfile | 2 ++ circle.yml | 5 +---- 4 files changed, 22 insertions(+), 16 deletions(-) create mode 100644 ci/Jenkinsfile diff --git a/.travis.yml b/.travis.yml index af201abaec..5102ee5ff2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,3 +1,4 @@ +# Warning: This file is automatically synced from https://github.com/ipfs/ci-sync so if you want to change it, please change it there and ask someone to sync all repositories. sudo: false language: node_js @@ -13,12 +14,15 @@ matrix: script: - npm run lint - npm run test - - make test + - npm run coverage before_script: - export DISPLAY=:99.0 - sh -e /etc/init.d/xvfb start +after_success: + - npm run coverage-publish + addons: firefox: 'latest' apt: diff --git a/appveyor.yml b/appveyor.yml index ba93339ba8..046bf91084 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,26 +1,29 @@ +# Warning: This file is automatically synced from https://github.com/ipfs/ci-sync so if you want to change it, please change it there and ask someone to sync all repositories. +version: "{build}" + environment: matrix: - nodejs_version: "6" - nodejs_version: "8" -init: - - git config --global core.autocrlf input +matrix: + fast_finish: true -# cache: -# - node_modules +install: + # Install Node.js + - ps: Install-Product node $env:nodejs_version -platform: - - x64 + # Upgrade npm + - npm install -g npm -install: - - ps: Install-Product node $env:nodejs_version $env:platform + # Output our current versions for debugging - node --version - npm --version + + # Install our package dependencies - npm install test_script: - - npm test + - npm run test:node build: off - -version: "{build}" diff --git a/ci/Jenkinsfile b/ci/Jenkinsfile new file mode 100644 index 0000000000..a7da2e54f3 --- /dev/null +++ b/ci/Jenkinsfile @@ -0,0 +1,2 @@ +// Warning: This file is automatically synced from https://github.com/ipfs/ci-sync so if you want to change it, please change it there and ask someone to sync all repositories. +javascript() diff --git a/circle.yml b/circle.yml index d67b6ae70b..00096937fd 100644 --- a/circle.yml +++ b/circle.yml @@ -1,11 +1,8 @@ +# Warning: This file is automatically synced from https://github.com/ipfs/ci-sync so if you want to change it, please change it there and ask someone to sync all repositories. 
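For reference, a minimal sketch of how the refactored keychain in the patches above is used. It assumes the package's default export is the `Keychain` class, that `datastore` is any interface-datastore implementation (for example datastore-fs or datastore-level, as used in the tests), and the key name 'alice' is only an example.

```js
// Illustrative sketch only; `datastore` is assumed to be an
// interface-datastore instance supplied by the caller.
const Keychain = require('libp2p-keychain')

const ks = new Keychain(datastore, { passPhrase: 'this is not a secure phrase' })

ks.createKey('alice', 'rsa', 2048, (err, info) => {
  if (err) throw err
  // The keychain now holds two datastore entries for the key:
  //   '/info/alice'  – the KeyInfo (name and id), written in one batch
  //   '/pkcs8/alice' – the encrypted PKCS #8 PEM
  ks.findKeyByName('alice', (err, key) => {
    if (err) throw err
    console.log(key.name, key.id) // 'alice' and the key's id
  })
})
```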
machine: node: version: stable -test: - post: - - npm run coverage -- --upload - dependencies: pre: - google-chrome --version From 9129d20bcbb424784855f58fe71ee1f66564927b Mon Sep 17 00:00:00 2001 From: Richard Schneider Date: Sun, 17 Dec 2017 11:30:52 +1300 Subject: [PATCH 022/102] docs: correct hash name --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index de4b333022..5d90826fe5 100644 --- a/README.md +++ b/README.md @@ -83,7 +83,7 @@ The **key id** is the SHA-256 [multihash](https://github.com/multiformats/multih ### Private key storage -A private key is stored as an encrypted PKCS 8 structure in the PEM format. It is protected by a key generated from the key chain's *passPhrase* using **PBKDF2**. Its file extension is `.p8`. +A private key is stored as an encrypted PKCS 8 structure in the PEM format. It is protected by a key generated from the key chain's *passPhrase* using **PBKDF2**. The default options for generating the derived encryption key are in the `dek` object ```js @@ -94,8 +94,8 @@ const defaultOptions = { dek: { keyLength: 512 / 8, iterationCount: 10000, - salt: 'you should override this value with a crypto secure random number', - hash: 'sha512' + salt: 'at least 16 characters long', + hash: 'sha2-512' } } ``` From e78b2483aee5f6b5540bef598c5c75c686b50c17 Mon Sep 17 00:00:00 2001 From: Richard Schneider Date: Sun, 17 Dec 2017 12:43:54 +1300 Subject: [PATCH 023/102] test: key name comparision --- test/keychain.spec.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/keychain.spec.js b/test/keychain.spec.js index 0883984df3..2c781e3fdd 100644 --- a/test/keychain.spec.js +++ b/test/keychain.spec.js @@ -126,7 +126,7 @@ module.exports = (datastore1, datastore2) => { ks.listKeys((err, keys) => { expect(err).to.not.exist() expect(keys).to.exist() - const mykey = keys.find((k) => k.name === rsaKeyName) + const mykey = keys.find((k) => k.name.normalize() === rsaKeyName.normalize()) expect(mykey).to.exist() done() }) From 3b7c691724f5aab64cb4b53db6e03c9b9f6c5f67 Mon Sep 17 00:00:00 2001 From: Richard Schneider Date: Sun, 17 Dec 2017 13:36:30 +1300 Subject: [PATCH 024/102] test(openssl): verify key id --- test/openssl.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/openssl.js b/test/openssl.js index 17865d7fdd..4c8134dc2b 100644 --- a/test/openssl.js +++ b/test/openssl.js @@ -146,7 +146,7 @@ DQd8 expect(err).to.not.exist() expect(key).to.exist() expect(key).to.have.property('name', keyName) - expect(key).to.have.property('id') + expect(key).to.have.property('id', 'QmeMWBbuyw8KycYhZVxMzVHK3zLH1mp2DT84X2NApqiXgn') ks.removeKey(keyName, done) }) }) From c1627a99e74d0806e1b3303f744e980856333ac4 Mon Sep 17 00:00:00 2001 From: Richard Schneider Date: Thu, 21 Dec 2017 02:43:54 +1300 Subject: [PATCH 025/102] feat: use libp2p-crypto (#18) * test: openssl interop is now the responsibility of libp2p-crypto * feat: use libp2p-crypto, not node-forge, for key management * fix: use libp2p-crypto.pbkdf, not node-forge * fix: do not ship CMS This removes all depencies on node-forge * test: update dependencies * test: remove dead code --- README.md | 7 +- package.json | 12 ++- src/cms.js | 96 ------------------------ src/keychain.js | 168 +++++++++++++++--------------------------- src/util.js | 86 --------------------- test/browser.js | 1 - test/keychain.spec.js | 65 +++------------- test/node.js | 1 - test/openssl.js | 154 -------------------------------------- 9 files changed, 79 
insertions(+), 511 deletions(-) delete mode 100644 src/cms.js delete mode 100644 src/util.js delete mode 100644 test/openssl.js diff --git a/README.md b/README.md index 5d90826fe5..a4a0a0ac83 100644 --- a/README.md +++ b/README.md @@ -85,15 +85,14 @@ The **key id** is the SHA-256 [multihash](https://github.com/multiformats/multih A private key is stored as an encrypted PKCS 8 structure in the PEM format. It is protected by a key generated from the key chain's *passPhrase* using **PBKDF2**. -The default options for generating the derived encryption key are in the `dek` object +The default options for generating the derived encryption key are in the `dek` object. This, along with the passPhrase, is the input to a `PBKDF2` function. + ```js const defaultOptions = { - createIfNeeded: true, - //See https://cryptosense.com/parameter-choice-for-pbkdf2/ dek: { keyLength: 512 / 8, - iterationCount: 10000, + iterationCount: 1000, salt: 'at least 16 characters long', hash: 'sha2-512' } diff --git a/package.json b/package.json index f90524fe40..541f308040 100644 --- a/package.json +++ b/package.json @@ -45,22 +45,20 @@ "async": "^2.6.0", "deepmerge": "^1.5.2", "interface-datastore": "~0.4.1", - "libp2p-crypto": "~0.10.3", - "multihashes": "~0.4.12", - "node-forge": "~0.7.1", + "libp2p-crypto": "~0.11.0", "pull-stream": "^3.6.1", "sanitize-filename": "^1.6.1" }, "devDependencies": { - "aegir": "^12.2.0", + "aegir": "^12.3.0", "chai": "^4.1.2", "chai-string": "^1.4.0", - "datastore-fs": "^0.4.1", - "datastore-level": "^0.7.0", + "datastore-fs": "~0.4.1", + "datastore-level": "~0.7.0", "dirty-chai": "^2.0.1", "level-js": "^2.2.4", "mocha": "^4.0.1", - "peer-id": "^0.10.2", + "peer-id": "~0.10.4", "pre-commit": "^1.2.2", "rimraf": "^2.6.2" } diff --git a/src/cms.js b/src/cms.js deleted file mode 100644 index fb66c2dffb..0000000000 --- a/src/cms.js +++ /dev/null @@ -1,96 +0,0 @@ -'use strict' - -const async = require('async') -const forge = require('node-forge') -const util = require('./util') - -class CMS { - constructor (keystore) { - if (!keystore) { - throw new Error('keystore is required') - } - - this.keystore = keystore - } - - createAnonymousEncryptedData (name, plain, callback) { - const self = this - if (!Buffer.isBuffer(plain)) { - return callback(new Error('Data is required')) - } - - self.keystore._getPrivateKey(name, (err, key) => { - if (err) { - return callback(err) - } - - try { - const privateKey = forge.pki.decryptRsaPrivateKey(key, self.keystore._()) - util.certificateForKey(privateKey, (err, certificate) => { - if (err) return callback(err) - - // create a p7 enveloped message - const p7 = forge.pkcs7.createEnvelopedData() - p7.addRecipient(certificate) - p7.content = forge.util.createBuffer(plain) - p7.encrypt() - - // convert message to DER - const der = forge.asn1.toDer(p7.toAsn1()).getBytes() - callback(null, Buffer.from(der, 'binary')) - }) - } catch (err) { - callback(err) - } - }) - } - - readData (cmsData, callback) { - if (!Buffer.isBuffer(cmsData)) { - return callback(new Error('CMS data is required')) - } - - const self = this - let cms - try { - const buf = forge.util.createBuffer(cmsData.toString('binary')) - const obj = forge.asn1.fromDer(buf) - cms = forge.pkcs7.messageFromAsn1(obj) - } catch (err) { - return callback(new Error('Invalid CMS: ' + err.message)) - } - - // Find a recipient whose key we hold. We only deal with recipient certs - // issued by ipfs (O=ipfs). 
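The README hunk above describes the `dek` options, together with the keychain's passphrase, as the input to a PBKDF2 function. A rough sketch of that derivation step — assuming the synchronous `pbkdf2` helper exposed by libp2p-crypto, which this patch switches the keychain to, and example option values taken from the README — might look like:

```js
// Illustrative sketch of deriving the data-encryption key (DEK).
const crypto = require('libp2p-crypto')

const passPhrase = 'this is not a secure phrase' // must be at least 20 characters
const dek = crypto.pbkdf2(
  passPhrase,
  'at least 16 characters long', // dek.salt
  1000,                          // dek.iterationCount
  512 / 8,                       // dek.keyLength
  'sha2-512'                     // dek.hash
)
// `dek` is then used as the password protecting each PKCS #8 PEM entry at rest.
```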
- const recipients = cms.recipients - .filter(r => r.issuer.find(a => a.shortName === 'O' && a.value === 'ipfs')) - .filter(r => r.issuer.find(a => a.shortName === 'CN')) - .map(r => { - return { - recipient: r, - keyId: r.issuer.find(a => a.shortName === 'CN').value - } - }) - async.detect( - recipients, - (r, cb) => self.keystore.findKeyById(r.keyId, (err, info) => cb(null, !err && info)), - (err, r) => { - if (err) return callback(err) - if (!r) return callback(new Error('No key found for decryption')) - - async.waterfall([ - (cb) => self.keystore.findKeyById(r.keyId, cb), - (key, cb) => self.keystore._getPrivateKey(key.name, cb) - ], (err, pem) => { - if (err) return callback(err) - - const privateKey = forge.pki.decryptRsaPrivateKey(pem, self.keystore._()) - cms.decrypt(r.recipient, privateKey) - async.setImmediate(() => callback(null, Buffer.from(cms.content.getBytes(), 'binary'))) - }) - } - ) - } -} - -module.exports = CMS diff --git a/src/keychain.js b/src/keychain.js index 004174d637..3d20504fc1 100644 --- a/src/keychain.js +++ b/src/keychain.js @@ -1,11 +1,9 @@ +/* eslint max-nested-callbacks: ["error", 5] */ 'use strict' const sanitize = require('sanitize-filename') -const forge = require('node-forge') const deepmerge = require('deepmerge') const crypto = require('libp2p-crypto') -const util = require('./util') -const CMS = require('./cms') const DS = require('interface-datastore') const pull = require('pull-stream') @@ -19,24 +17,11 @@ const NIST = { minIterationCount: 1000 } -/** - * Maps an IPFS hash name to its forge equivalent. - * - * See https://github.com/multiformats/multihash/blob/master/hashtable.csv - * - * @private - */ -const hashName2Forge = { - sha1: 'sha1', - 'sha2-256': 'sha256', - 'sha2-512': 'sha512' -} - const defaultOptions = { // See https://cryptosense.com/parametesr-choice-for-pbkdf2/ dek: { keyLength: 512 / 8, - iterationCount: 10000, + iterationCount: 1000, salt: 'you should override this value with a crypto secure random number', hash: 'sha2-512' } @@ -133,26 +118,15 @@ class Keychain { if (opts.dek.iterationCount < NIST.minIterationCount) { throw new Error(`dek.iterationCount must be least ${NIST.minIterationCount}`) } - this.dek = opts.dek - - // Get the hashing alogorithm - const hashAlgorithm = hashName2Forge[opts.dek.hash] - if (!hashAlgorithm) { - throw new Error(`dek.hash '${opts.dek.hash}' is unknown or not supported`) - } // Create the derived encrypting key - let dek = forge.pkcs5.pbkdf2( + const dek = crypto.pbkdf2( opts.passPhrase, opts.dek.salt, opts.dek.iterationCount, opts.dek.keyLength, - hashAlgorithm) - dek = forge.util.bytesToHex(dek) + opts.dek.hash) Object.defineProperty(this, '_', { value: () => dek }) - - // Provide access to protected messages - this.cms = new CMS(this) } /** @@ -189,31 +163,32 @@ class Keychain { if (size < 2048) { return _error(callback, `Invalid RSA key size ${size}`) } - forge.pki.rsa.generateKeyPair({bits: size, workers: -1}, (err, keypair) => { + break + default: + break + } + + crypto.keys.generateKeyPair(type, size, (err, keypair) => { + if (err) return _error(callback, err) + keypair.id((err, kid) => { + if (err) return _error(callback, err) + keypair.export(this._(), (err, pem) => { if (err) return _error(callback, err) - util.keyId(keypair.privateKey, (err, kid) => { + const keyInfo = { + name: name, + id: kid + } + const batch = self.store.batch() + batch.put(dsname, pem) + batch.put(DsInfoName(name), JSON.stringify(keyInfo)) + batch.commit((err) => { if (err) return _error(callback, err) - const 
pem = forge.pki.encryptRsaPrivateKey(keypair.privateKey, this._()) - const keyInfo = { - name: name, - id: kid - } - const batch = self.store.batch() - batch.put(dsname, pem) - batch.put(DsInfoName(name), JSON.stringify(keyInfo)) - batch.commit((err) => { - if (err) return _error(callback, err) - - callback(null, keyInfo) - }) + callback(null, keyInfo) }) }) - break - - default: - return _error(callback, `Invalid key type '${type}'`) - } + }) + }) }) } @@ -372,19 +347,10 @@ class Keychain { return _error(callback, `Key '${name}' does not exist. ${err.message}`) } const pem = res.toString() - try { - const options = { - algorithm: 'aes256', - count: this.dek.iterationCount, - saltSize: NIST.minSaltLength, - prfAlgorithm: 'sha512' - } - const privateKey = forge.pki.decryptRsaPrivateKey(pem, this._()) - const res = forge.pki.encryptRsaPrivateKey(privateKey, password, options) - return callback(null, res) - } catch (e) { - _error(callback, e) - } + crypto.keys.import(pem, this._(), (err, privateKey) => { + if (err) return _error(callback, err) + privateKey.export(password, callback) + }) }) } @@ -409,31 +375,27 @@ class Keychain { self.store.has(dsname, (err, exists) => { if (err) return _error(callback, err) if (exists) return _error(callback, `Key '${name}' already exists`) - try { - const privateKey = forge.pki.decryptRsaPrivateKey(pem, password) - if (privateKey === null) { - return _error(callback, 'Cannot read the key, most likely the password is wrong') - } - const newpem = forge.pki.encryptRsaPrivateKey(privateKey, this._()) - util.keyId(privateKey, (err, kid) => { + crypto.keys.import(pem, password, (err, privateKey) => { + if (err) return _error(callback, 'Cannot read the key, most likely the password is wrong') + privateKey.id((err, kid) => { if (err) return _error(callback, err) - - const keyInfo = { - name: name, - id: kid - } - const batch = self.store.batch() - batch.put(dsname, newpem) - batch.put(DsInfoName(name), JSON.stringify(keyInfo)) - batch.commit((err) => { + privateKey.export(this._(), (err, pem) => { if (err) return _error(callback, err) + const keyInfo = { + name: name, + id: kid + } + const batch = self.store.batch() + batch.put(dsname, pem) + batch.put(DsInfoName(name), JSON.stringify(keyInfo)) + batch.commit((err) => { + if (err) return _error(callback, err) - callback(null, keyInfo) + callback(null, keyInfo) + }) }) }) - } catch (err) { - _error(callback, err) - } + }) }) } @@ -445,42 +407,30 @@ class Keychain { if (!peer || !peer.privKey) { return _error(callback, 'Peer.privKey is required') } + + const privateKey = peer.privKey const dsname = DsName(name) self.store.has(dsname, (err, exists) => { if (err) return _error(callback, err) if (exists) return _error(callback, `Key '${name}' already exists`) - const privateKeyProtobuf = peer.marshalPrivKey() - crypto.keys.unmarshalPrivateKey(privateKeyProtobuf, (err, key) => { + privateKey.id((err, kid) => { if (err) return _error(callback, err) - try { - const der = key.marshal() - const buf = forge.util.createBuffer(der.toString('binary')) - const obj = forge.asn1.fromDer(buf) - const privateKey = forge.pki.privateKeyFromAsn1(obj) - if (privateKey === null) { - return _error(callback, 'Cannot read the peer private key') + privateKey.export(this._(), (err, pem) => { + if (err) return _error(callback, err) + const keyInfo = { + name: name, + id: kid } - const pem = forge.pki.encryptRsaPrivateKey(privateKey, this._()) - util.keyId(privateKey, (err, kid) => { + const batch = self.store.batch() + batch.put(dsname, pem) + 
batch.put(DsInfoName(name), JSON.stringify(keyInfo)) + batch.commit((err) => { if (err) return _error(callback, err) - const keyInfo = { - name: name, - id: kid - } - const batch = self.store.batch() - batch.put(dsname, pem) - batch.put(DsInfoName(name), JSON.stringify(keyInfo)) - batch.commit((err) => { - if (err) return _error(callback, err) - - callback(null, keyInfo) - }) + callback(null, keyInfo) }) - } catch (err) { - _error(callback, err) - } + }) }) }) } diff --git a/src/util.js b/src/util.js deleted file mode 100644 index 6066c33f2e..0000000000 --- a/src/util.js +++ /dev/null @@ -1,86 +0,0 @@ -'use strict' - -const forge = require('node-forge') -const pki = forge.pki -const multihash = require('multihashes') -const rsaUtils = require('libp2p-crypto/src/keys/rsa-utils') -const rsaClass = require('libp2p-crypto/src/keys/rsa-class') - -exports = module.exports - -// Create an IPFS key id; the SHA-256 multihash of a public key. -// See https://github.com/richardschneider/ipfs-encryption/issues/16 -exports.keyId = (privateKey, callback) => { - try { - const publicKey = pki.setRsaPublicKey(privateKey.n, privateKey.e) - const spki = pki.publicKeyToSubjectPublicKeyInfo(publicKey) - const der = Buffer.from(forge.asn1.toDer(spki).getBytes(), 'binary') - const jwk = rsaUtils.pkixToJwk(der) - const rsa = new rsaClass.RsaPublicKey(jwk) - rsa.hash((err, kid) => { - if (err) return callback(err) - - const kids = multihash.toB58String(kid) - return callback(null, kids) - }) - } catch (err) { - callback(err) - } -} - -exports.certificateForKey = (privateKey, callback) => { - exports.keyId(privateKey, (err, kid) => { - if (err) return callback(err) - - const publicKey = pki.setRsaPublicKey(privateKey.n, privateKey.e) - const cert = pki.createCertificate() - cert.publicKey = publicKey - cert.serialNumber = '01' - cert.validity.notBefore = new Date() - cert.validity.notAfter = new Date() - cert.validity.notAfter.setFullYear(cert.validity.notBefore.getFullYear() + 10) - var attrs = [{ - name: 'organizationName', - value: 'ipfs' - }, { - shortName: 'OU', - value: 'keystore' - }, { - name: 'commonName', - value: kid - }] - cert.setSubject(attrs) - cert.setIssuer(attrs) - cert.setExtensions([{ - name: 'basicConstraints', - cA: true - }, { - name: 'keyUsage', - keyCertSign: true, - digitalSignature: true, - nonRepudiation: true, - keyEncipherment: true, - dataEncipherment: true - }, { - name: 'extKeyUsage', - serverAuth: true, - clientAuth: true, - codeSigning: true, - emailProtection: true, - timeStamping: true - }, { - name: 'nsCertType', - client: true, - server: true, - email: true, - objsign: true, - sslCA: true, - emailCA: true, - objCA: true - }]) - // self-sign certificate - cert.sign(privateKey) - - return callback(null, cert) - }) -} diff --git a/test/browser.js b/test/browser.js index 9584a635c4..4e08b1375a 100644 --- a/test/browser.js +++ b/test/browser.js @@ -23,6 +23,5 @@ describe('browser', () => { }) require('./keychain.spec')(datastore1, datastore2) - require('./openssl')(datastore1) require('./peerid') }) diff --git a/test/keychain.spec.js b/test/keychain.spec.js index 2c781e3fdd..aae21b17ab 100644 --- a/test/keychain.spec.js +++ b/test/keychain.spec.js @@ -16,11 +16,10 @@ module.exports = (datastore1, datastore2) => { const rsaKeyName = 'tajné jméno' const renamedRsaKeyName = 'ชื่อลับ' let rsaKeyInfo - let emptyKeystore + // let emptyKeystore let ks before((done) => { - emptyKeystore = new Keychain(datastore1, { passPhrase: passPhrase }) ks = new Keychain(datastore2, { passPhrase: 
passPhrase }) done() }) @@ -163,56 +162,6 @@ module.exports = (datastore1, datastore2) => { }) }) - describe('CMS protected data', () => { - const plainData = Buffer.from('This is a message from Alice to Bob') - let cms - - it('service is available', (done) => { - expect(ks).to.have.property('cms') - done() - }) - - it('is anonymous', (done) => { - ks.cms.createAnonymousEncryptedData(rsaKeyName, plainData, (err, msg) => { - expect(err).to.not.exist() - expect(msg).to.exist() - expect(msg).to.be.instanceOf(Buffer) - cms = msg - done() - }) - }) - - it('is a PKCS #7 message', (done) => { - ks.cms.readData('not CMS', (err) => { - expect(err).to.exist() - done() - }) - }) - - it('is a PKCS #7 binary message', (done) => { - ks.cms.readData(plainData, (err) => { - expect(err).to.exist() - done() - }) - }) - - it('cannot be read without the key', (done) => { - emptyKeystore.cms.readData(cms, (err, plain) => { - expect(err).to.exist() - done() - }) - }) - - it('can be read with the key', (done) => { - ks.cms.readData(cms, (err, plain) => { - expect(err).to.not.exist() - expect(plain).to.exist() - expect(plain.toString()).to.equal(plainData.toString()) - done() - }) - }) - }) - describe('exported key', () => { let pemKey @@ -272,7 +221,17 @@ module.exports = (datastore1, datastore2) => { }) }) - it('key exists', (done) => { + it('key id exists', (done) => { + ks.findKeyById(alice.toB58String(), (err, key) => { + expect(err).to.not.exist() + expect(key).to.exist() + expect(key).to.have.property('name', 'alice') + expect(key).to.have.property('id', alice.toB58String()) + done() + }) + }) + + it('key name exists', (done) => { ks.findKeyByName('alice', (err, key) => { expect(err).to.not.exist() expect(key).to.exist() diff --git a/test/node.js b/test/node.js index 634716cf10..b003a7c8ba 100644 --- a/test/node.js +++ b/test/node.js @@ -30,6 +30,5 @@ describe('node', () => { }) require('./keychain.spec')(datastore1, datastore2) - require('./openssl')(datastore1) require('./peerid') }) diff --git a/test/openssl.js b/test/openssl.js deleted file mode 100644 index 4c8134dc2b..0000000000 --- a/test/openssl.js +++ /dev/null @@ -1,154 +0,0 @@ -/* eslint max-nested-callbacks: ["error", 8] */ -/* eslint-env mocha */ -'use strict' - -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) -const Keychain = require('..') - -module.exports = (datastore1) => { - describe('interop with openssl', () => { - const passPhrase = 'this is not a secure phrase' - const keyName = 'openssl-key' - let ks - - before((done) => { - ks = new Keychain(datastore1, { passPhrase: passPhrase }) - done() - }) - - it('can read a private key', (done) => { - /* - * Generated with - * openssl genpkey -algorithm RSA - * -pkeyopt rsa_keygen_bits:3072 - * -pkeyopt rsa_keygen_pubexp:65537 - */ - const pem = `-----BEGIN PRIVATE KEY----- -MIIG/wIBADANBgkqhkiG9w0BAQEFAASCBukwggblAgEAAoIBgQDp0Whyqa8KmdvK -0MsQGJEBzDAEHAZc0C6cr0rkb6Xwo+yB5kjZBRDORk0UXtYGE1pYt4JhUTmMzcWO -v2xTIsdbVMQlNtput2U8kIqS1cSTkX5HxOJtCiIzntMzuR/bGPSOexkyFQ8nCUqb -ROS7cln/ixprra2KMAKldCApN3ue2jo/JI1gyoS8sekhOASAa0ufMPpC+f70sc75 -Y53VLnGBNM43iM/2lsK+GI2a13d6rRy86CEM/ygnh/EDlyNDxo+SQmy6GmSv/lmR -xgWQE2dIfK504KIxFTOphPAQAr9AsmcNnCQLhbz7YTsBz8WcytHGQ0Z5pnBQJ9AV -CX9E6DFHetvs0CNLVw1iEO06QStzHulmNEI/3P8I1TIxViuESJxSu3pSNwG1bSJZ -+Qee24vvlz/slBzK5gZWHvdm46v7vl5z7SA+whncEtjrswd8vkJk9fI/YTUbgOC0 -HWMdc2t/LTZDZ+LUSZ/b2n5trvdJSsOKTjEfuf0wICC08pUUk8MCAwEAAQKCAYEA -ywve+DQCneIezHGk5cVvp2/6ApeTruXalJZlIxsRr3eq2uNwP4X2oirKpPX2RjBo 
-NMKnpnsyzuOiu+Pf3hJFrTpfWzHXXm5Eq+OZcwnQO5YNY6XGO4qhSNKT9ka9Mzbo -qRKdPrCrB+s5rryVJXKYVSInP3sDSQ2IPsYpZ6GW6Mv56PuFCpjTzElzejV7M0n5 -0bRmn+MZVMVUR54KYiaCywFgUzmr3yfs1cfcsKqMRywt2J58lRy/chTLZ6LILQMv -4V01neVJiRkTmUfIWvc1ENIFM9QJlky9AvA5ASvwTTRz8yOnxoOXE/y4OVyOePjT -cz9eumu9N5dPuUIMmsYlXmRNaeGZPD9bIgKY5zOlfhlfZSuOLNH6EHBNr6JAgfwL -pdP43sbg2SSNKpBZ0iSMvpyTpbigbe3OyhnFH/TyhcC2Wdf62S9/FRsvjlRPbakW -YhKAA2kmJoydcUDO5ccEga8b7NxCdhRiczbiU2cj70pMIuOhDlGAznyxsYbtyxaB -AoHBAPy6Cbt6y1AmuId/HYfvms6i8B+/frD1CKyn+sUDkPf81xSHV7RcNrJi1S1c -V55I0y96HulsR+GmcAW1DF3qivWkdsd/b4mVkizd/zJm3/Dm8p8QOnNTtdWvYoEB -VzfAhBGaR/xflSLxZh2WE8ZHQ3IcRCXV9ZFgJ7PMeTprBJXzl0lTptvrHyo9QK1v -obLrL/KuXWS0ql1uSnJr1vtDI5uW8WU4GDENeU5b/CJHpKpjVxlGg+7pmLknxlBl -oBnZnQKBwQDs2Ky29qZ69qnPWowKceMJ53Z6uoUeSffRZ7xuBjowpkylasEROjuL -nyAihIYB7fd7R74CnRVYLI+O2qXfNKJ8HN+TgcWv8LudkRcnZDSvoyPEJAPyZGfr -olRCXD3caqtarlZO7vXSAl09C6HcL2KZ8FuPIEsuO0Aw25nESMg9eVMaIC6s2eSU -NUt6xfZw1JC0c+f0LrGuFSjxT2Dr5WKND9ageI6afuauMuosjrrOMl2g0dMcSnVz -KrtYa7Wi1N8CgcBFnuJreUplDCWtfgEen40f+5b2yAQYr4fyOFxGxdK73jVJ/HbW -wsh2n+9mDZg9jIZQ/+1gFGpA6V7W06dSf/hD70ihcKPDXSbloUpaEikC7jxMQWY4 -uwjOkwAp1bq3Kxu21a+bAKHO/H1LDTrpVlxoJQ1I9wYtRDXrvBpxU2XyASbeFmNT -FhSByFn27Ve4OD3/NrWXtoVwM5/ioX6ZvUcj55McdTWE3ddbFNACiYX9QlyOI/TY -bhWafDCPmU9fj6kCgcEAjyQEfi9jPj2FM0RODqH1zS6OdG31tfCOTYicYQJyeKSI -/hAezwKaqi9phHMDancfcupQ89Nr6vZDbNrIFLYC3W+1z7hGeabMPNZLYAs3rE60 -dv4tRHlaNRbORazp1iTBmvRyRRI2js3O++3jzOb2eILDUyT5St+UU/LkY7R5EG4a -w1df3idx9gCftXufDWHqcqT6MqFl0QgIzo5izS68+PPxitpRlR3M3Mr4rCU20Rev -blphdF+rzAavYyj1hYuRAoHBANmxwbq+QqsJ19SmeGMvfhXj+T7fNZQFh2F0xwb2 -rMlf4Ejsnx97KpCLUkoydqAs2q0Ws9Nkx2VEVx5KfUD7fWhgbpdnEPnQkfeXv9sD -vZTuAoqInN1+vj1TME6EKR/6D4OtQygSNpecv23EuqEvyXWqRVsRt9Qd2B0H4k7h -gnjREs10u7zyqBIZH7KYVgyh27WxLr859ap8cKAH6Fb+UOPtZo3sUeeume60aebn -4pMwXeXP+LO8NIfRXV8mgrm86g== ------END PRIVATE KEY----- -` - ks.importKey(keyName, pem, '', (err, key) => { - expect(err).to.not.exist() - expect(key).to.exist() - expect(key).to.have.property('name', keyName) - expect(key).to.have.property('id') - ks.removeKey(keyName, done) - }) - }) - - // TODO: net.forge can not cope with this - // Uncaught AssertionError: expected [Error: Cannot read encrypted PBE data block. Unsupported OID.] 
to not exist - it.skip('can read a private encrypted key (v1)', (done) => { - /* - * Generated with - * openssl genpkey -algorithm RSA - * -pkeyopt rsa_keygen_bits:1024 - * -pkeyopt rsa_keygen_pubexp:65537 - * -out foo.pem - * openssl pkcs8 -in foo.pem -topk8 -passout pass:mypassword - */ - const pem = `-----BEGIN ENCRYPTED PRIVATE KEY----- -MIICoTAbBgkqhkiG9w0BBQMwDgQI2563Jugj/KkCAggABIICgPxHkKtUUE8EWevq -eX9nTjqpbsv0QoXQMhegfxDELJLU8tj6V0bWNt7QDdfQ1n6FRgnNvNGick6gyqHH -yH9qC2oXwkDFP7OrHp2NEZd7DHQLLc+L4KJ/0dzsiZ1U9no7XzQMUay9Bc918ADE -pN2/EqigWkaG4gNjkAeKWr6+BNRevDXlSvls7YDboNcTiACi5zJkthivB9g3vT1m -gPdN6Gf/mmqtBTDHeqj5QsmXYqeCyo5b26JgYsziABVZDHph4ekPUsTvudRpE9Ex -baXwdYEAZxVpSbTvQ3A5qysjSZeM9ttfRTSSwL391q7dViz4+aujpk0Vj7piH+1B -CkfO8/XudRdRlnOe+KjMidktKCsMGCIOW92IlfMvIQ/Zn1GTYj9bRXONFNJ2WPND -UmCKnL7cmworwg/weRorrGKBWIGspU+tDASOPSvIGKo6Hoxm4CN1TpDRY7DAGlgm -Y3TEbMYfpXyzkPjvAhJDt03D3J9PrTO6uM5d7YUaaTmJ2TQFQVF2Lc3Uz8lDJLs0 -ZYtfQ/4H+YY2RrX7ua7t6ArUcYXZtv0J4lRYWjwV8fGPUVc0d8xLJU0Yjf4BD7K8 -rsavHo9b5YvBUX7SgUyxAEembEOe3SjQ+gPu2U5wovcjUuC9eItEEsXGrx30BQ0E -8BtK2+hp0eMkW5/BYckJkH+Yl8ypbzRGRRIZzLgeI4JveSx/mNhewfgTr+ORPThZ -mBdkD5r+ixWF174naw53L8U9wF8kiK7pIE1N9TR4USEeovLwX6Ni/2MMDZedOfof -2f77eUdLsK19/5/lcgAAYaXauXWhy2d2r3SayFrC9woy0lh2VLKRMBjcx1oWb7dp -0uxzo5Y= ------END ENCRYPTED PRIVATE KEY----- -` - ks.importKey(keyName, pem, 'mypassword', (err, key) => { - expect(err).to.not.exist() - expect(key).to.exist() - expect(key).to.have.property('name', keyName) - expect(key).to.have.property('id') - ks.removeKey(keyName, done) - }) - }) - - it('can read a private encrypted key (v2)', (done) => { - /* - * Generated with - * openssl genpkey -algorithm RSA - * -pkeyopt rsa_keygen_bits:1024 - * -pkeyopt rsa_keygen_pubexp:65537 - * -out foo.pem - * openssl pkcs8 -in foo.pem -topk8 -v2 aes-256-cbc -passout pass:mypassword - */ - const pem = `-----BEGIN ENCRYPTED PRIVATE KEY----- -MIICzzBJBgkqhkiG9w0BBQ0wPDAbBgkqhkiG9w0BBQwwDgQIhuL894loRucCAggA -MB0GCWCGSAFlAwQBKgQQEoEtsjW3iC9/u0uGvkxX7wSCAoAsX3l6JoR2OGbT8CkY -YT3RQFqquOgItYOHw6E3tir2YrmxEAo99nxoL8pdto37KSC32eAGnfv5R1zmHHSx -0M3/y2AWiCBTX95EEzdtGC1hK3PBa/qpp/xEmcrsjYN6NXxMAkhC0hMP/HdvqMAg -ee7upvaYJsJcl8QLFNayAWr8b8cZA/RBhGEIRl59Eyj6nNtxDt3bCrfe06o1CPCV -50/fRZEwFOi/C6GYvPN6MrPZO3ALBWgopLT2yQqycTKtfxYWIdOsMBkAjKf2D6Pk -u2mqBsaP4b71jIIeT4euSJLsoJV+O39s8YHXtW8GtOqp7V5kIlnm90lZ9wzeLTZ7 -HJsD/jEdYto5J3YWm2wwEDccraffJSm7UDtJBvQdIx832kxeFCcGQjW38Zl1qqkg -iTH1PLTypxj2ZuviS2EkXVFb/kVU6leWwOt6fqWFC58UvJKeCk/6veazz3PDnTWM -92ClUqFd+CZn9VT4CIaJaAc6v5NLpPp+T9sRX9AtequPm7FyTeevY9bElfyk9gW9 -JDKgKxs6DGWDa16RL5vzwtU+G3o6w6IU+mEwa6/c+hN+pRFs/KBNLLSP9OHBx7BJ -X/32Ft+VFhJaK+lQ+f+hve7od/bgKnz4c/Vtp7Dh51DgWgCpBgb8p0vqu02vTnxD -BXtDv3h75l5PhvdWfVIzpMWRYFvPR+vJi066FjAz2sjYc0NMLSYtZWyWoIInjhoX -Dp5CQujCtw/ZSSlwde1DKEWAW4SeDZAOQNvuz0rU3eosNUJxEmh3aSrcrRtDpw+Y -mBUuWAZMpz7njBi7h+JDfmSW/GAaMwrVFC2gef5375R0TejAh+COAjItyoeYEvv8 -DQd8 ------END ENCRYPTED PRIVATE KEY----- -` - ks.importKey(keyName, pem, 'mypassword', (err, key) => { - expect(err).to.not.exist() - expect(key).to.exist() - expect(key).to.have.property('name', keyName) - expect(key).to.have.property('id', 'QmeMWBbuyw8KycYhZVxMzVHK3zLH1mp2DT84X2NApqiXgn') - ks.removeKey(keyName, done) - }) - }) - }) -} From 5343b0f2de92fd290108a4d6016988b84ff9eba0 Mon Sep 17 00:00:00 2001 From: David Dias Date: Wed, 20 Dec 2017 13:50:56 +0000 Subject: [PATCH 026/102] chore: update deps --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 541f308040..55b3b5a134 100644 --- 
a/package.json +++ b/package.json @@ -44,7 +44,7 @@ "dependencies": { "async": "^2.6.0", "deepmerge": "^1.5.2", - "interface-datastore": "~0.4.1", + "interface-datastore": "~0.4.2", "libp2p-crypto": "~0.11.0", "pull-stream": "^3.6.1", "sanitize-filename": "^1.6.1" @@ -53,7 +53,7 @@ "aegir": "^12.3.0", "chai": "^4.1.2", "chai-string": "^1.4.0", - "datastore-fs": "~0.4.1", + "datastore-fs": "~0.4.2", "datastore-level": "~0.7.0", "dirty-chai": "^2.0.1", "level-js": "^2.2.4", From 21611e437d46879f3d371a2b55e3249a91612684 Mon Sep 17 00:00:00 2001 From: David Dias Date: Wed, 20 Dec 2017 13:52:09 +0000 Subject: [PATCH 027/102] chore: update contributors --- package.json | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index 55b3b5a134..1fc3591fc7 100644 --- a/package.json +++ b/package.json @@ -61,5 +61,11 @@ "peer-id": "~0.10.4", "pre-commit": "^1.2.2", "rimraf": "^2.6.2" - } + }, + "contributors": [ + "David Dias ", + "Maciej Krüger ", + "Richard Schneider ", + "Victor Bjelkholm " + ] } From de15d129ddd5d7e2720ebbd070e6719d318ad304 Mon Sep 17 00:00:00 2001 From: David Dias Date: Wed, 20 Dec 2017 13:52:09 +0000 Subject: [PATCH 028/102] chore: release version v0.2.0 --- CHANGELOG.md | 22 ++++++++++++++++++++++ package.json | 2 +- 2 files changed, 23 insertions(+), 1 deletion(-) create mode 100644 CHANGELOG.md diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000000..5c79f5018d --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,22 @@ + +# 0.2.0 (2017-12-20) + + +### Bug Fixes + +* error message ([8305d20](https://github.com/libp2p/js-libp2p-keychain/commit/8305d20)) +* lint errors ([06917f7](https://github.com/libp2p/js-libp2p-keychain/commit/06917f7)) +* lint errors ([ff4f656](https://github.com/libp2p/js-libp2p-keychain/commit/ff4f656)) +* linting ([409a999](https://github.com/libp2p/js-libp2p-keychain/commit/409a999)) +* maps an IPFS hash name to its forge equivalent ([f71d3a6](https://github.com/libp2p/js-libp2p-keychain/commit/f71d3a6)), closes [#12](https://github.com/libp2p/js-libp2p-keychain/issues/12) +* more linting ([7c44c91](https://github.com/libp2p/js-libp2p-keychain/commit/7c44c91)) +* return info on removed key [#10](https://github.com/libp2p/js-libp2p-keychain/issues/10) ([f49e753](https://github.com/libp2p/js-libp2p-keychain/commit/f49e753)) + + +### Features + +* move bits from https://github.com/richardschneider/ipfs-encryption ([1a96ae8](https://github.com/libp2p/js-libp2p-keychain/commit/1a96ae8)) +* use libp2p-crypto ([#18](https://github.com/libp2p/js-libp2p-keychain/issues/18)) ([c1627a9](https://github.com/libp2p/js-libp2p-keychain/commit/c1627a9)) + + + diff --git a/package.json b/package.json index 1fc3591fc7..f70401aef9 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "libp2p-keychain", - "version": "0.1.0", + "version": "0.2.0", "description": "Key management and cryptographically protected messages", "main": "src/index.js", "scripts": { From 89a451c147a98be1d3fbbf4a95fc00ab853b1451 Mon Sep 17 00:00:00 2001 From: Richard Schneider Date: Thu, 28 Dec 2017 21:48:32 +1300 Subject: [PATCH 029/102] feat: generate unique options for a key chain (#20) --- src/keychain.js | 12 ++++++++++++ test/keychain.spec.js | 7 +++++++ 2 files changed, 19 insertions(+) diff --git a/src/keychain.js b/src/keychain.js index 3d20504fc1..2814834125 100644 --- a/src/keychain.js +++ b/src/keychain.js @@ -138,6 +138,18 @@ class Keychain { return defaultOptions } + /** + * Generates the options for a keychain. 
A random salt is produced. + * + * @returns {object} + */ + static generateOptions () { + const options = Object.assign({}, defaultOptions) + const saltLength = Math.ceil(NIST.minSaltLength / 3) * 3 // no base64 padding + options.dek.salt = crypto.randomBytes(saltLength).toString('base64') + return options + } + /** * Create a new key. * diff --git a/test/keychain.spec.js b/test/keychain.spec.js index aae21b17ab..32112dc5ed 100644 --- a/test/keychain.spec.js +++ b/test/keychain.spec.js @@ -46,6 +46,13 @@ module.exports = (datastore1, datastore2) => { expect(() => new Keychain(datastore2, { passPhrase: passPhrase, dek: { hash: 'my-hash' } })).to.throw() }) + it('can generate options', () => { + const options = Keychain.generateOptions() + options.passPhrase = passPhrase + const chain = new Keychain(datastore2, options) + expect(chain).to.exist() + }) + describe('key name', () => { it('is a valid filename and non-ASCII', () => { ks.removeKey('../../nasty', (err) => { From 849a7c75d047c0de23ea20f6706ad8a37e04cb6a Mon Sep 17 00:00:00 2001 From: David Dias Date: Thu, 28 Dec 2017 08:51:25 +0000 Subject: [PATCH 030/102] chore: update contributors From 6a84873a0a77a84b5ad00234d0ee8f688aa598b7 Mon Sep 17 00:00:00 2001 From: David Dias Date: Thu, 28 Dec 2017 08:51:26 +0000 Subject: [PATCH 031/102] chore: release version v0.2.1 --- CHANGELOG.md | 10 ++++++++++ package.json | 2 +- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5c79f5018d..79011d7428 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,13 @@ + +## [0.2.1](https://github.com/libp2p/js-libp2p-keychain/compare/v0.2.0...v0.2.1) (2017-12-28) + + +### Features + +* generate unique options for a key chain ([#20](https://github.com/libp2p/js-libp2p-keychain/issues/20)) ([89a451c](https://github.com/libp2p/js-libp2p-keychain/commit/89a451c)) + + + # 0.2.0 (2017-12-20) diff --git a/package.json b/package.json index f70401aef9..bbc1e64fcc 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "libp2p-keychain", - "version": "0.2.0", + "version": "0.2.1", "description": "Key management and cryptographically protected messages", "main": "src/index.js", "scripts": { From 1e276f6e94b7259773cdcca54a8d5dbe3ed37e8b Mon Sep 17 00:00:00 2001 From: David Dias Date: Sun, 28 Jan 2018 20:14:57 -0800 Subject: [PATCH 032/102] chore: update deps --- package.json | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/package.json b/package.json index bbc1e64fcc..e3f681abb4 100644 --- a/package.json +++ b/package.json @@ -43,22 +43,22 @@ "homepage": "https://github.com/libp2p/js-libp2p-keychain#readme", "dependencies": { "async": "^2.6.0", - "deepmerge": "^1.5.2", + "deepmerge": "^2.0.1", "interface-datastore": "~0.4.2", - "libp2p-crypto": "~0.11.0", + "libp2p-crypto": "~0.12.0", "pull-stream": "^3.6.1", "sanitize-filename": "^1.6.1" }, "devDependencies": { - "aegir": "^12.3.0", + "aegir": "^12.4.0", "chai": "^4.1.2", "chai-string": "^1.4.0", "datastore-fs": "~0.4.2", "datastore-level": "~0.7.0", "dirty-chai": "^2.0.1", "level-js": "^2.2.4", - "mocha": "^4.0.1", - "peer-id": "~0.10.4", + "mocha": "^5.0.0", + "peer-id": "~0.10.5", "pre-commit": "^1.2.2", "rimraf": "^2.6.2" }, From 2ce44446a2472ca594133e04c2269f8c627c6ea0 Mon Sep 17 00:00:00 2001 From: Richard Schneider Date: Mon, 29 Jan 2018 18:44:51 +1300 Subject: [PATCH 033/102] fix: deepmerge 2.0.1 fails in browser, stay with 1.5.2 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/package.json b/package.json index e3f681abb4..955a40a3aa 100644 --- a/package.json +++ b/package.json @@ -43,7 +43,7 @@ "homepage": "https://github.com/libp2p/js-libp2p-keychain#readme", "dependencies": { "async": "^2.6.0", - "deepmerge": "^2.0.1", + "deepmerge": "^1.5.2", "interface-datastore": "~0.4.2", "libp2p-crypto": "~0.12.0", "pull-stream": "^3.6.1", From 3816b8207fdd98ad2d3884e0edf9a2ff6a203720 Mon Sep 17 00:00:00 2001 From: David Dias Date: Sun, 28 Jan 2018 22:22:59 -0800 Subject: [PATCH 034/102] chore: update contributors From acf48a8efe33f0bb4c7ae0f8f9a55ff2ab378168 Mon Sep 17 00:00:00 2001 From: David Dias Date: Sun, 28 Jan 2018 22:22:59 -0800 Subject: [PATCH 035/102] chore: release version v0.3.0 --- CHANGELOG.md | 10 ++++++++++ package.json | 2 +- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 79011d7428..74e2782182 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,13 @@ + +# [0.3.0](https://github.com/libp2p/js-libp2p-keychain/compare/v0.2.1...v0.3.0) (2018-01-29) + + +### Bug Fixes + +* deepmerge 2.0.1 fails in browser, stay with 1.5.2 ([2ce4444](https://github.com/libp2p/js-libp2p-keychain/commit/2ce4444)) + + + ## [0.2.1](https://github.com/libp2p/js-libp2p-keychain/compare/v0.2.0...v0.2.1) (2017-12-28) diff --git a/package.json b/package.json index 955a40a3aa..13e0a85f47 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "libp2p-keychain", - "version": "0.2.1", + "version": "0.3.0", "description": "Key management and cryptographically protected messages", "main": "src/index.js", "scripts": { From 5560669fc9df1bf94374f9e0218b2042409765ce Mon Sep 17 00:00:00 2001 From: Richard Schneider Date: Mon, 29 Jan 2018 19:34:55 +1300 Subject: [PATCH 036/102] CMS - PKCS #7 (#19) CMS - PKCS #7 --- .travis.yml | 4 -- README.md | 8 ++- src/cms.js | 142 ++++++++++++++++++++++++++++++++++++++++++ src/keychain.js | 30 ++++++--- src/util.js | 70 +++++++++++++++++++++ test/browser.js | 1 + test/cms-interop.js | 73 ++++++++++++++++++++++ test/keychain.spec.js | 69 +++++++++++++++++++- test/node.js | 1 + 9 files changed, 384 insertions(+), 14 deletions(-) create mode 100644 src/cms.js create mode 100644 src/util.js create mode 100644 test/cms-interop.js diff --git a/.travis.yml b/.travis.yml index 5102ee5ff2..a456ff1244 100644 --- a/.travis.yml +++ b/.travis.yml @@ -14,15 +14,11 @@ matrix: script: - npm run lint - npm run test - - npm run coverage before_script: - export DISPLAY=:99.0 - sh -e /etc/init.d/xvfb start -after_success: - - npm run coverage-publish - addons: firefox: 'latest' apt: diff --git a/README.md b/README.md index a4a0a0ac83..a3b1c33ecc 100644 --- a/README.md +++ b/README.md @@ -65,8 +65,8 @@ A naming service for a key Cryptographically protected messages -- `cms.createAnonymousEncryptedData (name, plain, callback)` -- `cms.readData (cmsData, callback)` +- `cms.encrypt (name, plain, callback)` +- `cms.decrypt (cmsData, callback)` ### KeyInfo @@ -105,6 +105,10 @@ const defaultOptions = { The actual physical storage of an encrypted key is left to implementations of [interface-datastore](https://github.com/ipfs/interface-datastore/). A key benifit is that now the key chain can be used in browser with the [js-datastore-level](https://github.com/ipfs/js-datastore-level) implementation. +### Cryptographic Message Syntax (CMS) + +CMS, aka [PKCS #7](https://en.wikipedia.org/wiki/PKCS) and [RFC 5652](https://tools.ietf.org/html/rfc5652), describes an encapsulation syntax for data protection. 
It is used to digitally sign, digest, authenticate, or encrypt arbitrary message content. Basically, `cms.encrypt` creates a DER message that can be only be read by someone holding the private key. + ## Contribute Feel free to join in. All welcome. Open an [issue](https://github.com/libp2p/js-libp2p-crypto/issues)! diff --git a/src/cms.js b/src/cms.js new file mode 100644 index 0000000000..937063cc25 --- /dev/null +++ b/src/cms.js @@ -0,0 +1,142 @@ +'use strict' + +const async = require('async') +const forge = require('node-forge') +const util = require('./util') + +/** + * Cryptographic Message Syntax (aka PKCS #7) + * + * CMS describes an encapsulation syntax for data protection. It + * is used to digitally sign, digest, authenticate, or encrypt + * arbitrary message content. + * + * See RFC 5652 for all the details. + */ +class CMS { + /** + * Creates a new instance with a keychain + * + * @param {Keychain} keychain - the available keys + */ + constructor (keychain) { + if (!keychain) { + throw new Error('keychain is required') + } + + this.keychain = keychain + } + + /** + * Creates some protected data. + * + * The output Buffer contains the PKCS #7 message in DER. + * + * @param {string} name - The local key name. + * @param {Buffer} plain - The data to encrypt. + * @param {function(Error, Buffer)} callback + * @returns {undefined} + */ + encrypt (name, plain, callback) { + const self = this + const done = (err, result) => async.setImmediate(() => callback(err, result)) + + if (!Buffer.isBuffer(plain)) { + return done(new Error('Plain data must be a Buffer')) + } + + async.series([ + (cb) => self.keychain.findKeyByName(name, cb), + (cb) => self.keychain._getPrivateKey(name, cb) + ], (err, results) => { + if (err) return done(err) + + let key = results[0] + let pem = results[1] + try { + const privateKey = forge.pki.decryptRsaPrivateKey(pem, self.keychain._()) + util.certificateForKey(key, privateKey, (err, certificate) => { + if (err) return callback(err) + + // create a p7 enveloped message + const p7 = forge.pkcs7.createEnvelopedData() + p7.addRecipient(certificate) + p7.content = forge.util.createBuffer(plain) + p7.encrypt() + + // convert message to DER + const der = forge.asn1.toDer(p7.toAsn1()).getBytes() + done(null, Buffer.from(der, 'binary')) + }) + } catch (err) { + done(err) + } + }) + } + + /** + * Reads some protected data. + * + * The keychain must contain one of the keys used to encrypt the data. If none of the keys + * exists, an Error is returned with the property 'missingKeys'. It is array of key ids. + * + * @param {Buffer} cmsData - The CMS encrypted data to decrypt. + * @param {function(Error, Buffer)} callback + * @returns {undefined} + */ + decrypt (cmsData, callback) { + const done = (err, result) => async.setImmediate(() => callback(err, result)) + + if (!Buffer.isBuffer(cmsData)) { + return done(new Error('CMS data is required')) + } + + const self = this + let cms + try { + const buf = forge.util.createBuffer(cmsData.toString('binary')) + const obj = forge.asn1.fromDer(buf) + cms = forge.pkcs7.messageFromAsn1(obj) + } catch (err) { + return done(new Error('Invalid CMS: ' + err.message)) + } + + // Find a recipient whose key we hold. We only deal with recipient certs + // issued by ipfs (O=ipfs). 
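For illustration, a minimal round trip through the `cms.encrypt`/`cms.decrypt` API documented in the README above. It assumes `ks` is a keychain that already holds a key named 'alice'; any existing key name works.

```js
// Illustrative sketch only.
const plain = Buffer.from('This is a message from Alice to Bob')

ks.cms.encrypt('alice', plain, (err, cmsData) => {
  if (err) throw err
  // cmsData is a Buffer holding the DER-encoded PKCS #7 enveloped message.
  ks.cms.decrypt(cmsData, (err, decrypted) => {
    if (err) throw err
    console.log(decrypted.toString()) // 'This is a message from Alice to Bob'
  })
})
```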
+ const recipients = cms.recipients + .filter(r => r.issuer.find(a => a.shortName === 'O' && a.value === 'ipfs')) + .filter(r => r.issuer.find(a => a.shortName === 'CN')) + .map(r => { + return { + recipient: r, + keyId: r.issuer.find(a => a.shortName === 'CN').value + } + }) + async.detect( + recipients, + (r, cb) => self.keychain.findKeyById(r.keyId, (err, info) => cb(null, !err && info)), + (err, r) => { + if (err) return done(err) + if (!r) { + const missingKeys = recipients.map(r => r.keyId) + err = new Error('Decryption needs one of the key(s): ' + missingKeys.join(', ')) + err.missingKeys = missingKeys + return done(err) + } + + async.waterfall([ + (cb) => self.keychain.findKeyById(r.keyId, cb), + (key, cb) => self.keychain._getPrivateKey(key.name, cb) + ], (err, pem) => { + if (err) return done(err) + + const privateKey = forge.pki.decryptRsaPrivateKey(pem, self.keychain._()) + cms.decrypt(r.recipient, privateKey) + done(null, Buffer.from(cms.content.getBytes(), 'binary')) + }) + } + ) + } +} + +module.exports = CMS diff --git a/src/keychain.js b/src/keychain.js index 2814834125..41f5c1c4ce 100644 --- a/src/keychain.js +++ b/src/keychain.js @@ -6,6 +6,7 @@ const deepmerge = require('deepmerge') const crypto = require('libp2p-crypto') const DS = require('interface-datastore') const pull = require('pull-stream') +const CMS = require('./cms') const keyPrefix = '/pkcs8/' const infoPrefix = '/info/' @@ -21,7 +22,7 @@ const defaultOptions = { // See https://cryptosense.com/parametesr-choice-for-pbkdf2/ dek: { keyLength: 512 / 8, - iterationCount: 1000, + iterationCount: 10000, salt: 'you should override this value with a crypto secure random number', hash: 'sha2-512' } @@ -86,8 +87,8 @@ function DsInfoName (name) { * Manages the lifecycle of a key. Keys are encrypted at rest using PKCS #8. * * A key in the store has two entries - * - '/info/key-name', contains the KeyInfo for the key - * - '/pkcs8/key-name', contains the PKCS #8 for the key + * - '/info/*key-name*', contains the KeyInfo for the key + * - '/pkcs8/*key-name*', contains the PKCS #8 for the key * */ class Keychain { @@ -130,12 +131,17 @@ class Keychain { } /** - * The default options for a keychain. + * Gets an object that can encrypt/decrypt protected data + * using the Cryptographic Message Syntax (CMS). * - * @returns {object} + * CMS describes an encapsulation syntax for data protection. It + * is used to digitally sign, digest, authenticate, or encrypt + * arbitrary message content. + * + * @returns {CMS} */ - static get options () { - return defaultOptions + get cms () { + return new CMS(this) } /** @@ -150,6 +156,16 @@ class Keychain { return options } + /** + * Gets an object that can encrypt/decrypt protected data. + * The default options for a keychain. + * + * @returns {object} + */ + static get options () { + return defaultOptions + } + /** * Create a new key. * diff --git a/src/util.js b/src/util.js new file mode 100644 index 0000000000..9aa248ff9a --- /dev/null +++ b/src/util.js @@ -0,0 +1,70 @@ +'use strict' + +const forge = require('node-forge') +const pki = forge.pki +exports = module.exports + +/** + * Gets a self-signed X.509 certificate for the key. + * + * The output Buffer contains the PKCS #7 message in DER. 
+ * + * TODO: move to libp2p-crypto package + * + * @param {KeyInfo} key - The id and name of the key + * @param {RsaPrivateKey} privateKey - The naked key + * @param {function(Error, Certificate)} callback + * @returns {undefined} + */ +exports.certificateForKey = (key, privateKey, callback) => { + const publicKey = pki.setRsaPublicKey(privateKey.n, privateKey.e) + const cert = pki.createCertificate() + cert.publicKey = publicKey + cert.serialNumber = '01' + cert.validity.notBefore = new Date() + cert.validity.notAfter = new Date() + cert.validity.notAfter.setFullYear(cert.validity.notBefore.getFullYear() + 10) + const attrs = [{ + name: 'organizationName', + value: 'ipfs' + }, { + shortName: 'OU', + value: 'keystore' + }, { + name: 'commonName', + value: key.id + }] + cert.setSubject(attrs) + cert.setIssuer(attrs) + cert.setExtensions([{ + name: 'basicConstraints', + cA: true + }, { + name: 'keyUsage', + keyCertSign: true, + digitalSignature: true, + nonRepudiation: true, + keyEncipherment: true, + dataEncipherment: true + }, { + name: 'extKeyUsage', + serverAuth: true, + clientAuth: true, + codeSigning: true, + emailProtection: true, + timeStamping: true + }, { + name: 'nsCertType', + client: true, + server: true, + email: true, + objsign: true, + sslCA: true, + emailCA: true, + objCA: true + }]) + // self-sign certificate + cert.sign(privateKey) + + return callback(null, cert) +} diff --git a/test/browser.js b/test/browser.js index 4e08b1375a..e1aa2b00ad 100644 --- a/test/browser.js +++ b/test/browser.js @@ -23,5 +23,6 @@ describe('browser', () => { }) require('./keychain.spec')(datastore1, datastore2) + require('./cms-interop')(datastore2) require('./peerid') }) diff --git a/test/cms-interop.js b/test/cms-interop.js new file mode 100644 index 0000000000..a7449984fd --- /dev/null +++ b/test/cms-interop.js @@ -0,0 +1,73 @@ +/* eslint max-nested-callbacks: ["error", 8] */ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +chai.use(require('chai-string')) +const Keychain = require('..') + +module.exports = (datastore) => { + describe('cms interop', () => { + const passPhrase = 'this is not a secure phrase' + const aliceKeyName = 'cms-interop-alice' + let ks + + before((done) => { + ks = new Keychain(datastore, { passPhrase: passPhrase }) + done() + }) + + const plainData = Buffer.from('This is a message from Alice to Bob') + + it('imports openssl key', function (done) { + this.timeout(10 * 1000) + const aliceKid = 'QmNzBqPwp42HZJccsLtc4ok6LjZAspckgs2du5tTmjPfFA' + const alice = `-----BEGIN ENCRYPTED PRIVATE KEY----- +MIICxjBABgkqhkiG9w0BBQ0wMzAbBgkqhkiG9w0BBQwwDgQIMhYqiVoLJMICAggA +MBQGCCqGSIb3DQMHBAhU7J9bcJPLDQSCAoDzi0dP6z97wJBs3jK2hDvZYdoScknG +QMPOnpG1LO3IZ7nFha1dta5liWX+xRFV04nmVYkkNTJAPS0xjJOG9B5Hm7wm8uTd +1rOaYKOW5S9+1sD03N+fAx9DDFtB7OyvSdw9ty6BtHAqlFk3+/APASJS12ak2pg7 +/Ei6hChSYYRS9WWGw4lmSitOBxTmrPY1HmODXkR3txR17LjikrMTd6wyky9l/u7A +CgkMnj1kn49McOBJ4gO14c9524lw9OkPatyZK39evFhx8AET73LrzCnsf74HW9Ri +dKq0FiKLVm2wAXBZqdd5ll/TPj3wmFqhhLSj/txCAGg+079gq2XPYxxYC61JNekA +ATKev5zh8x1Mf1maarKN72sD28kS/J+aVFoARIOTxbG3g+1UbYs/00iFcuIaM4IY +zB1kQUFe13iWBsJ9nfvN7TJNSVnh8NqHNbSg0SdzKlpZHHSWwOUrsKmxmw/XRVy/ +ufvN0hZQ3BuK5MZLixMWAyKc9zbZSOB7E7VNaK5Fmm85FRz0L1qRjHvoGcEIhrOt +0sjbsRvjs33J8fia0FF9nVfOXvt/67IGBKxIMF9eE91pY5wJNwmXcBk8jghTZs83 +GNmMB+cGH1XFX4cT4kUGzvqTF2zt7IP+P2cQTS1+imKm7r8GJ7ClEZ9COWWdZIcH +igg5jozKCW82JsuWSiW9tu0F/6DuvYiZwHS3OLiJP0CuLfbOaRw8Jia1RTvXEH7m 
+3N0/kZ8hJIK4M/t/UAlALjeNtFxYrFgsPgLxxcq7al1ruG7zBq8L/G3RnkSjtHqE +cn4oisOvxCprs4aM9UVjtZTCjfyNpX8UWwT1W3rySV+KQNhxuMy3RzmL +-----END ENCRYPTED PRIVATE KEY----- +` + ks.importKey(aliceKeyName, alice, 'mypassword', (err, key) => { + expect(err).to.not.exist() + expect(key.name).to.equal(aliceKeyName) + expect(key.id).to.equal(aliceKid) + done() + }) + }) + + it('decrypts node-forge example', (done) => { + const example = ` +MIIBcwYJKoZIhvcNAQcDoIIBZDCCAWACAQAxgfowgfcCAQAwYDBbMQ0wCwYDVQQK +EwRpcGZzMREwDwYDVQQLEwhrZXlzdG9yZTE3MDUGA1UEAxMuUW1OekJxUHdwNDJI +WkpjY3NMdGM0b2s2TGpaQXNwY2tnczJkdTV0VG1qUGZGQQIBATANBgkqhkiG9w0B +AQEFAASBgLKXCZQYmMLuQ8m0Ex/rr3KNK+Q2+QG1zIbIQ9MFPUNQ7AOgGOHyL40k +d1gr188EHuiwd90PafZoQF9VRSX9YtwGNqAE8+LD8VaITxCFbLGRTjAqeOUHR8cO +knU1yykWGkdlbclCuu0NaAfmb8o0OX50CbEKZB7xmsv8tnqn0H0jMF4GCSqGSIb3 +DQEHATAdBglghkgBZQMEASoEEP/PW1JWehQx6/dsLkp/Mf+gMgQwFM9liLTqC56B +nHILFmhac/+a/StQOKuf9dx5qXeGvt9LnwKuGGSfNX4g+dTkoa6N +` + ks.cms.decrypt(Buffer.from(example, 'base64'), (err, plain) => { + expect(err).to.not.exist() + expect(plain).to.exist() + expect(plain.toString()).to.equal(plainData.toString()) + done() + }) + }) + }) +} diff --git a/test/keychain.spec.js b/test/keychain.spec.js index 32112dc5ed..ae78cb1e7b 100644 --- a/test/keychain.spec.js +++ b/test/keychain.spec.js @@ -16,11 +16,12 @@ module.exports = (datastore1, datastore2) => { const rsaKeyName = 'tajné jméno' const renamedRsaKeyName = 'ชื่อลับ' let rsaKeyInfo - // let emptyKeystore + let emptyKeystore let ks before((done) => { ks = new Keychain(datastore2, { passPhrase: passPhrase }) + emptyKeystore = new Keychain(datastore1, { passPhrase: passPhrase }) done() }) @@ -169,6 +170,72 @@ module.exports = (datastore1, datastore2) => { }) }) + describe('CMS protected data', () => { + const plainData = Buffer.from('This is a message from Alice to Bob') + let cms + + it('service is available', (done) => { + expect(ks).to.have.property('cms') + done() + }) + + it('requires a key', (done) => { + ks.cms.encrypt('no-key', plainData, (err, msg) => { + expect(err).to.exist() + done() + }) + }) + + it('requires plain data as a Buffer', (done) => { + ks.cms.encrypt(rsaKeyName, 'plain data', (err, msg) => { + expect(err).to.exist() + done() + }) + }) + + it('encrypts', (done) => { + ks.cms.encrypt(rsaKeyName, plainData, (err, msg) => { + expect(err).to.not.exist() + expect(msg).to.exist() + expect(msg).to.be.instanceOf(Buffer) + cms = msg + done() + }) + }) + + it('is a PKCS #7 message', (done) => { + ks.cms.decrypt('not CMS', (err) => { + expect(err).to.exist() + done() + }) + }) + + it('is a PKCS #7 binary message', (done) => { + ks.cms.decrypt(plainData, (err) => { + expect(err).to.exist() + done() + }) + }) + + it('cannot be read without the key', (done) => { + emptyKeystore.cms.decrypt(cms, (err, plain) => { + expect(err).to.exist() + expect(err).to.have.property('missingKeys') + expect(err.missingKeys).to.eql([rsaKeyInfo.id]) + done() + }) + }) + + it('can be read with the key', (done) => { + ks.cms.decrypt(cms, (err, plain) => { + expect(err).to.not.exist() + expect(plain).to.exist() + expect(plain.toString()).to.equal(plainData.toString()) + done() + }) + }) + }) + describe('exported key', () => { let pemKey diff --git a/test/node.js b/test/node.js index b003a7c8ba..6ca293ee35 100644 --- a/test/node.js +++ b/test/node.js @@ -30,5 +30,6 @@ describe('node', () => { }) require('./keychain.spec')(datastore1, datastore2) + require('./cms-interop')(datastore2) require('./peerid') }) From 486e54b3ac72cfeb4f91a5ee4bc6059afa590846 Mon 
Sep 17 00:00:00 2001 From: David Dias Date: Sun, 28 Jan 2018 22:36:44 -0800 Subject: [PATCH 037/102] chore: update contributors From ee978a54ea83bdd47be9e2c7d48f0ce403cd9c61 Mon Sep 17 00:00:00 2001 From: David Dias Date: Sun, 28 Jan 2018 22:36:45 -0800 Subject: [PATCH 038/102] chore: release version v0.3.1 --- CHANGELOG.md | 5 +++++ package.json | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 74e2782182..06b1412fd4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ + +## [0.3.1](https://github.com/libp2p/js-libp2p-keychain/compare/v0.3.0...v0.3.1) (2018-01-29) + + + # [0.3.0](https://github.com/libp2p/js-libp2p-keychain/compare/v0.2.1...v0.3.0) (2018-01-29) diff --git a/package.json b/package.json index 13e0a85f47..f04b5c8e1a 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "libp2p-keychain", - "version": "0.3.0", + "version": "0.3.1", "description": "Key management and cryptographically protected messages", "main": "src/index.js", "scripts": { From 974c5070696cf1233f9e687a4ca806677332ccd0 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Mon, 25 Jun 2018 15:06:17 +0200 Subject: [PATCH 039/102] docs: add lead-maintainer * docs: add lead-maintainer --- README.md | 4 ++++ package.json | 5 ++--- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index a3b1c33ecc..24bcf0d667 100644 --- a/README.md +++ b/README.md @@ -14,6 +14,10 @@ > A secure key chain for libp2p in JavaScript +## Lead Maintainer + +[Vasco Santos](https://github.com/vasco-santos). + ## Features - Manages the lifecycle of a key diff --git a/package.json b/package.json index f04b5c8e1a..73598b9ff8 100644 --- a/package.json +++ b/package.json @@ -2,6 +2,7 @@ "name": "libp2p-keychain", "version": "0.3.1", "description": "Key management and cryptographically protected messages", + "leadMaintainer": "Vasco Santos ", "main": "src/index.js", "scripts": { "lint": "aegir lint", @@ -15,7 +16,7 @@ "coverage": "aegir coverage", "coverage-publish": "aegir coverage publish" }, - "pre-commit": [ + "pre-push": [ "lint", "test" ], @@ -35,7 +36,6 @@ "secure", "crypto" ], - "author": "Richard Schneider ", "license": "MIT", "bugs": { "url": "https://github.com/libp2p/js-libp2p-keychain/issues" @@ -59,7 +59,6 @@ "level-js": "^2.2.4", "mocha": "^5.0.0", "peer-id": "~0.10.5", - "pre-commit": "^1.2.2", "rimraf": "^2.6.2" }, "contributors": [ From 0065b0a49e1b5403cda2ef77d4d9a94f00c01c73 Mon Sep 17 00:00:00 2001 From: Masahiro Saito Date: Sat, 30 Jun 2018 07:38:19 +0900 Subject: [PATCH 040/102] chore: fix out of date npms (#21) --- package.json | 6 +++--- src/keychain.js | 5 +++-- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/package.json b/package.json index 73598b9ff8..6526a147ce 100644 --- a/package.json +++ b/package.json @@ -43,14 +43,14 @@ "homepage": "https://github.com/libp2p/js-libp2p-keychain#readme", "dependencies": { "async": "^2.6.0", - "deepmerge": "^1.5.2", "interface-datastore": "~0.4.2", - "libp2p-crypto": "~0.12.0", + "libp2p-crypto": "~0.13.0", + "lodash.merge": "^4.6.1", "pull-stream": "^3.6.1", "sanitize-filename": "^1.6.1" }, "devDependencies": { - "aegir": "^12.4.0", + "aegir": "^13.0.7", "chai": "^4.1.2", "chai-string": "^1.4.0", "datastore-fs": "~0.4.2", diff --git a/src/keychain.js b/src/keychain.js index 41f5c1c4ce..0ec392a630 100644 --- a/src/keychain.js +++ b/src/keychain.js @@ -2,7 +2,7 @@ 'use strict' const sanitize = require('sanitize-filename') -const deepmerge = require('deepmerge') 
+const deepmerge = require('lodash.merge') const crypto = require('libp2p-crypto') const DS = require('interface-datastore') const pull = require('pull-stream') @@ -104,7 +104,8 @@ class Keychain { } this.store = store - const opts = deepmerge(defaultOptions, options) + const opts = {} + deepmerge(opts, defaultOptions, options) // Enforce NIST SP 800-132 if (!opts.passPhrase || opts.passPhrase.length < 20) { From 73d4530c5bc00e082ee2398945e195ce7d781cfe Mon Sep 17 00:00:00 2001 From: David Dias Date: Sat, 30 Jun 2018 15:13:49 +0100 Subject: [PATCH 041/102] chore: update deps --- package.json | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/package.json b/package.json index 6526a147ce..d09ff45d35 100644 --- a/package.json +++ b/package.json @@ -42,23 +42,23 @@ }, "homepage": "https://github.com/libp2p/js-libp2p-keychain#readme", "dependencies": { - "async": "^2.6.0", + "async": "^2.6.1", "interface-datastore": "~0.4.2", "libp2p-crypto": "~0.13.0", "lodash.merge": "^4.6.1", - "pull-stream": "^3.6.1", + "pull-stream": "^3.6.8", "sanitize-filename": "^1.6.1" }, "devDependencies": { - "aegir": "^13.0.7", + "aegir": "^14.0.0", "chai": "^4.1.2", "chai-string": "^1.4.0", - "datastore-fs": "~0.4.2", - "datastore-level": "~0.7.0", + "datastore-fs": "~0.5.0", + "datastore-level": "~0.8.0", "dirty-chai": "^2.0.1", - "level-js": "^2.2.4", - "mocha": "^5.0.0", - "peer-id": "~0.10.5", + "level-js": "^3.0.0", + "mocha": "^5.2.0", + "peer-id": "~0.10.7", "rimraf": "^2.6.2" }, "contributors": [ From f95fef4ad2e8bdf1d132c5264d8078bc5cf090f0 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Tue, 3 Jul 2018 16:21:32 +0200 Subject: [PATCH 042/102] chore: use lodash main dependency --- package.json | 2 +- src/keychain.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index d09ff45d35..62155f30ca 100644 --- a/package.json +++ b/package.json @@ -45,7 +45,7 @@ "async": "^2.6.1", "interface-datastore": "~0.4.2", "libp2p-crypto": "~0.13.0", - "lodash.merge": "^4.6.1", + "lodash": "^4.6.1", "pull-stream": "^3.6.8", "sanitize-filename": "^1.6.1" }, diff --git a/src/keychain.js b/src/keychain.js index 0ec392a630..da94ab2e59 100644 --- a/src/keychain.js +++ b/src/keychain.js @@ -2,7 +2,7 @@ 'use strict' const sanitize = require('sanitize-filename') -const deepmerge = require('lodash.merge') +const deepmerge = require('lodash/merge') const crypto = require('libp2p-crypto') const DS = require('interface-datastore') const pull = require('pull-stream') From 8dfaab1af03bcadf7bad60d903180fcebb189e13 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Tue, 18 Sep 2018 12:48:58 +0100 Subject: [PATCH 043/102] fix: validate createKey params properly (#26) License: MIT Signed-off-by: Alan Shaw --- src/keychain.js | 12 ++++++++++++ test/keychain.spec.js | 24 ++++++++++++++++++++++++ 2 files changed, 36 insertions(+) diff --git a/src/keychain.js b/src/keychain.js index da94ab2e59..a9619fdb4a 100644 --- a/src/keychain.js +++ b/src/keychain.js @@ -6,6 +6,8 @@ const deepmerge = require('lodash/merge') const crypto = require('libp2p-crypto') const DS = require('interface-datastore') const pull = require('pull-stream') +const isString = require('lodash/isString') +const isSafeInteger = require('lodash/isSafeInteger') const CMS = require('./cms') const keyPrefix = '/pkcs8/' @@ -30,6 +32,7 @@ const defaultOptions = { function validateKeyName (name) { if (!name) return false + if (!isString(name)) return false return name === sanitize(name.trim()) } @@ -182,6 
+185,15 @@ class Keychain { if (!validateKeyName(name) || name === 'self') { return _error(callback, `Invalid key name '${name}'`) } + + if (!isString(type)) { + return _error(callback, `Invalid key type '${type}'`) + } + + if (!isSafeInteger(size)) { + return _error(callback, `Invalid key size '${size}'`) + } + const dsname = DsName(name) self.store.has(dsname, (err, exists) => { if (err) return _error(callback, err) diff --git a/test/keychain.spec.js b/test/keychain.spec.js index ae78cb1e7b..ed6f1a80f0 100644 --- a/test/keychain.spec.js +++ b/test/keychain.spec.js @@ -117,6 +117,30 @@ module.exports = (datastore1, datastore2) => { }) }) + it('should validate name is string', (done) => { + ks.createKey(5, 'rsa', 2048, (err) => { + expect(err).to.exist() + expect(err.message).to.contain('Invalid key name') + done() + }) + }) + + it('should validate type is string', (done) => { + ks.createKey('TEST' + Date.now(), null, 2048, (err) => { + expect(err).to.exist() + expect(err.message).to.contain('Invalid key type') + done() + }) + }) + + it('should validate size is integer', (done) => { + ks.createKey('TEST' + Date.now(), 'rsa', 'string', (err) => { + expect(err).to.exist() + expect(err.message).to.contain('Invalid key size') + done() + }) + }) + describe('implements NIST SP 800-131A', () => { it('disallows RSA length < 2048', (done) => { ks.createKey('bad-nist-rsa', 'rsa', 1024, (err) => { From 65129bff3b445a8df0cfd390635c7f022dbf6c11 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Tue, 18 Sep 2018 13:02:43 +0100 Subject: [PATCH 044/102] chore: update contributors --- package.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/package.json b/package.json index 62155f30ca..77fdec15a0 100644 --- a/package.json +++ b/package.json @@ -62,9 +62,12 @@ "rimraf": "^2.6.2" }, "contributors": [ + "Alan Shaw ", "David Dias ", "Maciej Krüger ", + "Masahiro Saito ", "Richard Schneider ", + "Vasco Santos ", "Victor Bjelkholm " ] } From 5d3f489f23622415cb6d29d891d87743f295b06a Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Tue, 18 Sep 2018 13:02:43 +0100 Subject: [PATCH 045/102] chore: release version v0.3.2 --- CHANGELOG.md | 10 ++++++++++ package.json | 2 +- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 06b1412fd4..8714d39d3e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,13 @@ + +## [0.3.2](https://github.com/libp2p/js-libp2p-keychain/compare/v0.3.1...v0.3.2) (2018-09-18) + + +### Bug Fixes + +* validate createKey params properly ([#26](https://github.com/libp2p/js-libp2p-keychain/issues/26)) ([8dfaab1](https://github.com/libp2p/js-libp2p-keychain/commit/8dfaab1)) + + + ## [0.3.1](https://github.com/libp2p/js-libp2p-keychain/compare/v0.3.0...v0.3.1) (2018-01-29) diff --git a/package.json b/package.json index 77fdec15a0..958a6f46b4 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "libp2p-keychain", - "version": "0.3.1", + "version": "0.3.2", "description": "Key management and cryptographically protected messages", "leadMaintainer": "Vasco Santos ", "main": "src/index.js", From 24d4374b2094918e556c11f2d2f0c2032ad9b98d Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Thu, 25 Oct 2018 09:31:32 +0100 Subject: [PATCH 046/102] chore: upgrade dependencies (#27) --- package.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index 958a6f46b4..3f09155240 100644 --- a/package.json +++ b/package.json @@ -43,15 +43,15 @@ "homepage": 
"https://github.com/libp2p/js-libp2p-keychain#readme", "dependencies": { "async": "^2.6.1", - "interface-datastore": "~0.4.2", + "interface-datastore": "~0.6.0", "libp2p-crypto": "~0.13.0", "lodash": "^4.6.1", "pull-stream": "^3.6.8", "sanitize-filename": "^1.6.1" }, "devDependencies": { - "aegir": "^14.0.0", - "chai": "^4.1.2", + "aegir": "^15.3.0", + "chai": "^4.2.0", "chai-string": "^1.4.0", "datastore-fs": "~0.5.0", "datastore-level": "~0.8.0", From 571c81a2be5a09bdbb58e6b8699fdb415b82b645 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Thu, 25 Oct 2018 09:37:22 +0100 Subject: [PATCH 047/102] chore: update contributors --- package.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index 3f09155240..e9e265e24b 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "libp2p-keychain", - "version": "0.3.2", + "version": "0.3.3", "description": "Key management and cryptographically protected messages", "leadMaintainer": "Vasco Santos ", "main": "src/index.js", @@ -68,6 +68,7 @@ "Masahiro Saito ", "Richard Schneider ", "Vasco Santos ", + "Vasco Santos ", "Victor Bjelkholm " ] } From 251e0b87b64626798405b6c0da2c8246b141b9b5 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Thu, 25 Oct 2018 09:37:22 +0100 Subject: [PATCH 048/102] chore: release version v0.3.3 --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8714d39d3e..1c9548a9d0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ + +## [0.3.3](https://github.com/libp2p/js-libp2p-keychain/compare/v0.3.2...v0.3.3) (2018-10-25) + + + ## [0.3.2](https://github.com/libp2p/js-libp2p-keychain/compare/v0.3.1...v0.3.2) (2018-09-18) From 17268d5fe3bdd5e92f0fabb5a39c39841dd72a79 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Fri, 4 Jan 2019 10:51:56 +0000 Subject: [PATCH 049/102] chore: update dependencies (#29) --- .travis.yml | 28 ---------------------------- circle.yml | 15 --------------- package.json | 20 ++++++++++---------- test/browser.js | 4 ++-- 4 files changed, 12 insertions(+), 55 deletions(-) delete mode 100644 .travis.yml delete mode 100644 circle.yml diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index a456ff1244..0000000000 --- a/.travis.yml +++ /dev/null @@ -1,28 +0,0 @@ -# Warning: This file is automatically synced from https://github.com/ipfs/ci-sync so if you want to change it, please change it there and ask someone to sync all repositories. -sudo: false -language: node_js - -matrix: - include: - - node_js: 6 - env: CXX=g++-4.8 - - node_js: 8 - env: CXX=g++-4.8 - # - node_js: stable - # env: CXX=g++-4.8 - -script: - - npm run lint - - npm run test - -before_script: - - export DISPLAY=:99.0 - - sh -e /etc/init.d/xvfb start - -addons: - firefox: 'latest' - apt: - sources: - - ubuntu-toolchain-r-test - packages: - - g++-4.8 diff --git a/circle.yml b/circle.yml deleted file mode 100644 index 00096937fd..0000000000 --- a/circle.yml +++ /dev/null @@ -1,15 +0,0 @@ -# Warning: This file is automatically synced from https://github.com/ipfs/ci-sync so if you want to change it, please change it there and ask someone to sync all repositories. 
-machine: - node: - version: stable - -dependencies: - pre: - - google-chrome --version - - curl -L -o google-chrome.deb https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb - - sudo dpkg -i google-chrome.deb || true - - sudo apt-get update - - sudo apt-get install -f - - sudo apt-get install --only-upgrade lsb-base - - sudo dpkg -i google-chrome.deb - - google-chrome --version diff --git a/package.json b/package.json index e9e265e24b..15d8f4bad1 100644 --- a/package.json +++ b/package.json @@ -44,22 +44,22 @@ "dependencies": { "async": "^2.6.1", "interface-datastore": "~0.6.0", - "libp2p-crypto": "~0.13.0", - "lodash": "^4.6.1", - "pull-stream": "^3.6.8", + "libp2p-crypto": "~0.15.0", + "lodash": "^4.17.11", + "pull-stream": "^3.6.9", "sanitize-filename": "^1.6.1" }, "devDependencies": { - "aegir": "^15.3.0", + "aegir": "^18.0.2", "chai": "^4.2.0", - "chai-string": "^1.4.0", - "datastore-fs": "~0.5.0", - "datastore-level": "~0.8.0", + "chai-string": "^1.5.0", + "datastore-fs": "~0.7.0", + "datastore-level": "~0.10.0", "dirty-chai": "^2.0.1", - "level-js": "^3.0.0", + "level-js": "^4.0.0", "mocha": "^5.2.0", - "peer-id": "~0.10.7", - "rimraf": "^2.6.2" + "peer-id": "~0.12.1", + "rimraf": "^2.6.3" }, "contributors": [ "Alan Shaw ", diff --git a/test/browser.js b/test/browser.js index e1aa2b00ad..374ce6d02f 100644 --- a/test/browser.js +++ b/test/browser.js @@ -5,8 +5,8 @@ const async = require('async') const LevelStore = require('datastore-level') describe('browser', () => { - const datastore1 = new LevelStore('test-keystore-1', {db: require('level-js')}) - const datastore2 = new LevelStore('test-keystore-2', {db: require('level-js')}) + const datastore1 = new LevelStore('test-keystore-1', { db: require('level-js') }) + const datastore2 = new LevelStore('test-keystore-2', { db: require('level-js') }) before((done) => { async.series([ From a753b1c88276f7753adfe3b2e1ffe68ba88b0133 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Fri, 4 Jan 2019 10:54:24 +0000 Subject: [PATCH 050/102] chore: update contributors --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 15d8f4bad1..9cd9fed074 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "libp2p-keychain", - "version": "0.3.3", + "version": "0.3.4", "description": "Key management and cryptographically protected messages", "leadMaintainer": "Vasco Santos ", "main": "src/index.js", From 4b895cf46ffdf0eae8ae15fee1bb4800d6f97e9b Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Fri, 4 Jan 2019 10:54:24 +0000 Subject: [PATCH 051/102] chore: release version v0.3.4 --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1c9548a9d0..01ba3a4bd1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ + +## [0.3.4](https://github.com/libp2p/js-libp2p-keychain/compare/v0.3.3...v0.3.4) (2019-01-04) + + + ## [0.3.3](https://github.com/libp2p/js-libp2p-keychain/compare/v0.3.2...v0.3.3) (2018-10-25) From 7eeed87b101a6a68560f350502f230e38bd276b6 Mon Sep 17 00:00:00 2001 From: Hugo Dias Date: Thu, 10 Jan 2019 11:16:03 +0000 Subject: [PATCH 052/102] fix: reduce bundle size (#28) --- .gitignore | 1 + appveyor.yml | 29 ----------------------------- package.json | 14 ++++++++------ src/cms.js | 19 ++++++++++++------- src/keychain.js | 18 ++++++++---------- src/util.js | 3 ++- test/browser.js | 6 +++--- test/node.js | 6 +++--- 8 files changed, 37 insertions(+), 59 deletions(-) delete mode 100644 appveyor.yml 
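
The hunks below reduce the bundle by requiring only the node-forge, async and pull-stream sub-modules the keychain actually uses, instead of each package's full entry point. A minimal sketch of the pattern (the module paths are taken from the hunks that follow; the commented "before" lines and the example call sites are only illustrative):

    // Before: whole-package requires pull every sub-module into the bundle.
    // const forge = require('node-forge')
    // const async = require('async')

    // After: deep-require only what is needed. node-forge sub-modules
    // register themselves on the shared forge object when loaded.
    require('node-forge/lib/pkcs7')
    require('node-forge/lib/pbe')
    const forge = require('node-forge/lib/forge')

    const setImmediate = require('async/setImmediate')
    const series = require('async/series')
    const collect = require('pull-stream/sinks/collect')
    const pull = require('pull-stream/pull')

    // Call sites stay the same, e.g. forge.pkcs7.messageFromAsn1(asn1)
    // or series([taskA, taskB], callback).
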
diff --git a/.gitignore b/.gitignore index 1c73b3783a..b64f085266 100644 --- a/.gitignore +++ b/.gitignore @@ -41,3 +41,4 @@ test/test-data/go-ipfs-repo/LOG.old # while testing npm5 package-lock.json +yarn.lock \ No newline at end of file diff --git a/appveyor.yml b/appveyor.yml deleted file mode 100644 index 046bf91084..0000000000 --- a/appveyor.yml +++ /dev/null @@ -1,29 +0,0 @@ -# Warning: This file is automatically synced from https://github.com/ipfs/ci-sync so if you want to change it, please change it there and ask someone to sync all repositories. -version: "{build}" - -environment: - matrix: - - nodejs_version: "6" - - nodejs_version: "8" - -matrix: - fast_finish: true - -install: - # Install Node.js - - ps: Install-Product node $env:nodejs_version - - # Upgrade npm - - npm install -g npm - - # Output our current versions for debugging - - node --version - - npm --version - - # Install our package dependencies - - npm install - -test_script: - - npm run test:node - -build: off diff --git a/package.json b/package.json index 9cd9fed074..3b9752893c 100644 --- a/package.json +++ b/package.json @@ -44,21 +44,23 @@ "dependencies": { "async": "^2.6.1", "interface-datastore": "~0.6.0", - "libp2p-crypto": "~0.15.0", - "lodash": "^4.17.11", - "pull-stream": "^3.6.9", + "libp2p-crypto": "~0.16.0", + "merge-options": "^1.0.1", + "node-forge": "~0.7.6", + "pull-stream": "^3.6.8", "sanitize-filename": "^1.6.1" }, "devDependencies": { - "aegir": "^18.0.2", + "aegir": "^18.0.3", "chai": "^4.2.0", - "chai-string": "^1.5.0", + "chai-string": "^1.4.0", "datastore-fs": "~0.7.0", "datastore-level": "~0.10.0", "dirty-chai": "^2.0.1", "level-js": "^4.0.0", "mocha": "^5.2.0", - "peer-id": "~0.12.1", + "multihashes": "~0.4.14", + "peer-id": "~0.12.2", "rimraf": "^2.6.3" }, "contributors": [ diff --git a/src/cms.js b/src/cms.js index 937063cc25..90d7d85fdc 100644 --- a/src/cms.js +++ b/src/cms.js @@ -1,7 +1,12 @@ 'use strict' -const async = require('async') -const forge = require('node-forge') +const setImmediate = require('async/setImmediate') +const series = require('async/series') +const detect = require('async/detect') +const waterfall = require('async/waterfall') +require('node-forge/lib/pkcs7') +require('node-forge/lib/pbe') +const forge = require('node-forge/lib/forge') const util = require('./util') /** @@ -39,13 +44,13 @@ class CMS { */ encrypt (name, plain, callback) { const self = this - const done = (err, result) => async.setImmediate(() => callback(err, result)) + const done = (err, result) => setImmediate(() => callback(err, result)) if (!Buffer.isBuffer(plain)) { return done(new Error('Plain data must be a Buffer')) } - async.series([ + series([ (cb) => self.keychain.findKeyByName(name, cb), (cb) => self.keychain._getPrivateKey(name, cb) ], (err, results) => { @@ -85,7 +90,7 @@ class CMS { * @returns {undefined} */ decrypt (cmsData, callback) { - const done = (err, result) => async.setImmediate(() => callback(err, result)) + const done = (err, result) => setImmediate(() => callback(err, result)) if (!Buffer.isBuffer(cmsData)) { return done(new Error('CMS data is required')) @@ -112,7 +117,7 @@ class CMS { keyId: r.issuer.find(a => a.shortName === 'CN').value } }) - async.detect( + detect( recipients, (r, cb) => self.keychain.findKeyById(r.keyId, (err, info) => cb(null, !err && info)), (err, r) => { @@ -124,7 +129,7 @@ class CMS { return done(err) } - async.waterfall([ + waterfall([ (cb) => self.keychain.findKeyById(r.keyId, cb), (key, cb) => self.keychain._getPrivateKey(key.name, cb) ], (err, 
pem) => { diff --git a/src/keychain.js b/src/keychain.js index a9619fdb4a..cecc3207e7 100644 --- a/src/keychain.js +++ b/src/keychain.js @@ -2,12 +2,11 @@ 'use strict' const sanitize = require('sanitize-filename') -const deepmerge = require('lodash/merge') +const mergeOptions = require('merge-options') const crypto = require('libp2p-crypto') const DS = require('interface-datastore') -const pull = require('pull-stream') -const isString = require('lodash/isString') -const isSafeInteger = require('lodash/isSafeInteger') +const collect = require('pull-stream/sinks/collect') +const pull = require('pull-stream/pull') const CMS = require('./cms') const keyPrefix = '/pkcs8/' @@ -32,7 +31,7 @@ const defaultOptions = { function validateKeyName (name) { if (!name) return false - if (!isString(name)) return false + if (typeof name !== 'string') return false return name === sanitize(name.trim()) } @@ -107,8 +106,7 @@ class Keychain { } this.store = store - const opts = {} - deepmerge(opts, defaultOptions, options) + const opts = mergeOptions(defaultOptions, options) // Enforce NIST SP 800-132 if (!opts.passPhrase || opts.passPhrase.length < 20) { @@ -186,11 +184,11 @@ class Keychain { return _error(callback, `Invalid key name '${name}'`) } - if (!isString(type)) { + if (typeof type !== 'string') { return _error(callback, `Invalid key type '${type}'`) } - if (!isSafeInteger(size)) { + if (!Number.isSafeInteger(size)) { return _error(callback, `Invalid key size '${size}'`) } @@ -246,7 +244,7 @@ class Keychain { } pull( self.store.query(query), - pull.collect((err, res) => { + collect((err, res) => { if (err) return _error(callback, err) const info = res.map(r => JSON.parse(r.value)) diff --git a/src/util.js b/src/util.js index 9aa248ff9a..bc61c5b7fc 100644 --- a/src/util.js +++ b/src/util.js @@ -1,6 +1,7 @@ 'use strict' -const forge = require('node-forge') +require('node-forge/lib/x509') +const forge = require('node-forge/lib/forge') const pki = forge.pki exports = module.exports diff --git a/test/browser.js b/test/browser.js index 374ce6d02f..0a37bedd94 100644 --- a/test/browser.js +++ b/test/browser.js @@ -1,7 +1,7 @@ /* eslint-env mocha */ 'use strict' -const async = require('async') +const series = require('async/series') const LevelStore = require('datastore-level') describe('browser', () => { @@ -9,14 +9,14 @@ describe('browser', () => { const datastore2 = new LevelStore('test-keystore-2', { db: require('level-js') }) before((done) => { - async.series([ + series([ (cb) => datastore1.open(cb), (cb) => datastore2.open(cb) ], done) }) after((done) => { - async.series([ + series([ (cb) => datastore1.close(cb), (cb) => datastore2.close(cb) ], done) diff --git a/test/node.js b/test/node.js index 6ca293ee35..e11d074431 100644 --- a/test/node.js +++ b/test/node.js @@ -4,7 +4,7 @@ const os = require('os') const path = require('path') const rimraf = require('rimraf') -const async = require('async') +const series = require('async/series') const FsStore = require('datastore-fs') describe('node', () => { @@ -14,14 +14,14 @@ describe('node', () => { const datastore2 = new FsStore(store2) before((done) => { - async.series([ + series([ (cb) => datastore1.open(cb), (cb) => datastore2.open(cb) ], done) }) after((done) => { - async.series([ + series([ (cb) => datastore1.close(cb), (cb) => datastore2.close(cb), (cb) => rimraf(store1, cb), From 5cbded55d516750da1b6069c85aa39d1de6aaa75 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Thu, 10 Jan 2019 11:24:15 +0000 Subject: [PATCH 053/102] chore: update contributors 
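
The keychain.js hunk in the preceding bundle-size patch swaps lodash's merge (which mutates its first argument and therefore needed a throwaway empty object) for merge-options, which deep-merges plain objects and returns a fresh result. A minimal sketch of that behaviour, reusing a trimmed copy of the dek defaults from keychain.js (the 20000 override and the console.log calls are only for illustration):

    const mergeOptions = require('merge-options')

    const defaultOptions = {
      dek: { keyLength: 512 / 8, iterationCount: 10000 }
    }

    // User options are layered over the defaults; nested objects are merged
    // rather than replaced, and neither input object is modified.
    const opts = mergeOptions(defaultOptions, { dek: { iterationCount: 20000 } })

    console.log(opts.dek.iterationCount)           // 20000 - override wins
    console.log(opts.dek.keyLength)                // 64    - default kept
    console.log(defaultOptions.dek.iterationCount) // 10000 - defaults untouched
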
--- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 9cd9fed074..069fe75c5a 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "libp2p-keychain", - "version": "0.3.4", + "version": "0.3.5", "description": "Key management and cryptographically protected messages", "leadMaintainer": "Vasco Santos ", "main": "src/index.js", @@ -67,8 +67,8 @@ "Maciej Krüger ", "Masahiro Saito ", "Richard Schneider ", - "Vasco Santos ", "Vasco Santos ", + "Vasco Santos ", "Victor Bjelkholm " ] } From 4dd2ad36dd03e66a35d7bc40b9a463bd19a76bf9 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Thu, 10 Jan 2019 11:24:15 +0000 Subject: [PATCH 054/102] chore: release version v0.3.5 --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 01ba3a4bd1..21b3e53cec 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ + +## [0.3.5](https://github.com/libp2p/js-libp2p-keychain/compare/v0.3.4...v0.3.5) (2019-01-10) + + + ## [0.3.4](https://github.com/libp2p/js-libp2p-keychain/compare/v0.3.3...v0.3.4) (2019-01-04) From eaf6a88b47f08cec4bff147b110725b6957298d3 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Thu, 10 Jan 2019 11:33:28 +0000 Subject: [PATCH 055/102] chore: update contributors --- package.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index fb4d11b94a..9055b7cc48 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "libp2p-keychain", - "version": "0.3.5", + "version": "0.3.6", "description": "Key management and cryptographically protected messages", "leadMaintainer": "Vasco Santos ", "main": "src/index.js", @@ -66,6 +66,7 @@ "contributors": [ "Alan Shaw ", "David Dias ", + "Hugo Dias ", "Maciej Krüger ", "Masahiro Saito ", "Richard Schneider ", From aa5a6cb73c564a0b01dfb6d2b74c4ca490b8e2cd Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Thu, 10 Jan 2019 11:33:28 +0000 Subject: [PATCH 056/102] chore: release version v0.3.6 --- CHANGELOG.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 21b3e53cec..5f9af60949 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,13 @@ + +## [0.3.6](https://github.com/libp2p/js-libp2p-keychain/compare/v0.3.5...v0.3.6) (2019-01-10) + + +### Bug Fixes + +* reduce bundle size ([#28](https://github.com/libp2p/js-libp2p-keychain/issues/28)) ([7eeed87](https://github.com/libp2p/js-libp2p-keychain/commit/7eeed87)) + + + ## [0.3.5](https://github.com/libp2p/js-libp2p-keychain/compare/v0.3.4...v0.3.5) (2019-01-10) From 3779bd0ba2e579432e55c1d01ca3d5f1daabed13 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Mon, 18 Feb 2019 14:15:55 +0000 Subject: [PATCH 057/102] chore: use travis (#32) --- .travis.yml | 42 ++++++++++++++++++++++++++++++++++++++++++ ci/Jenkinsfile | 2 -- 2 files changed, 42 insertions(+), 2 deletions(-) create mode 100644 .travis.yml delete mode 100644 ci/Jenkinsfile diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000000..37005a25df --- /dev/null +++ b/.travis.yml @@ -0,0 +1,42 @@ +language: node_js +cache: npm +stages: + - check + - test + - cov + +node_js: + - '10' + +os: + - linux + - osx + - windows + +script: npx nyc -s npm run test:node -- --bail +after_success: npx nyc report --reporter=text-lcov > coverage.lcov && npx codecov + +jobs: + include: + - stage: check + script: + - npx aegir commitlint --travis + - npx aegir dep-check -- -i wrtc -i electron-webrtc + - npm run lint + + 
- stage: test + name: chrome + addons: + chrome: stable + script: + - npx aegir test -t browser + + - stage: test + name: firefox + addons: + firefox: latest + script: + - npx aegir test -t browser -- --browsers FirefoxHeadless + +notifications: + email: false diff --git a/ci/Jenkinsfile b/ci/Jenkinsfile deleted file mode 100644 index a7da2e54f3..0000000000 --- a/ci/Jenkinsfile +++ /dev/null @@ -1,2 +0,0 @@ -// Warning: This file is automatically synced from https://github.com/ipfs/ci-sync so if you want to change it, please change it there and ask someone to sync all repositories. -javascript() From 9eb11f42452b4266db0c19de3310e809a0087669 Mon Sep 17 00:00:00 2001 From: Alberto Elias Date: Mon, 25 Feb 2019 12:04:54 +0100 Subject: [PATCH 058/102] feat: adds support for ed25199 and secp256k1 (#31) --- README.md | 6 +- src/keychain.js | 128 +++++++++++++++++++++------------------- test/keychain.spec.js | 132 ++++++++++++++++++++++++++++++------------ 3 files changed, 168 insertions(+), 98 deletions(-) diff --git a/README.md b/README.md index 24bcf0d667..655ff03d2d 100644 --- a/README.md +++ b/README.md @@ -57,8 +57,8 @@ Managing a key - `createKey (name, type, size, callback)` - `renameKey (oldName, newName, callback)` - `removeKey (name, callback)` -- `exportKey (name, password, callback)` -- `importKey (name, pem, password, callback)` +- `exportKey (name, password, callback)` // Omit _password_ for `ed25199` or `secp256k1` keys +- `importKey (name, encKey, password, callback)` // Omit _password_ for `ed25199` or `secp256k1` keys - `importPeer (name, peer, callback)` A naming service for a key @@ -67,7 +67,7 @@ A naming service for a key - `findKeyById (id, callback)` - `findKeyByName (name, callback)` -Cryptographically protected messages +Cryptographically protected messages (Only supported with RSA keys) - `cms.encrypt (name, plain, callback)` - `cms.decrypt (cmsData, callback)` diff --git a/src/keychain.js b/src/keychain.js index cecc3207e7..e236d8a860 100644 --- a/src/keychain.js +++ b/src/keychain.js @@ -20,7 +20,7 @@ const NIST = { } const defaultOptions = { - // See https://cryptosense.com/parametesr-choice-for-pbkdf2/ + // See https://cryptosense.com/blog/parameter-choice-for-pbkdf2/ dek: { keyLength: 512 / 8, iterationCount: 10000, @@ -197,7 +197,8 @@ class Keychain { if (err) return _error(callback, err) if (exists) return _error(callback, `Key '${name}' already exists`) - switch (type.toLowerCase()) { + type = type.toLowerCase() + switch (type) { case 'rsa': if (size < 2048) { return _error(callback, `Invalid RSA key size ${size}`) @@ -211,21 +212,16 @@ class Keychain { if (err) return _error(callback, err) keypair.id((err, kid) => { if (err) return _error(callback, err) - keypair.export(this._(), (err, pem) => { - if (err) return _error(callback, err) - const keyInfo = { - name: name, - id: kid - } - const batch = self.store.batch() - batch.put(dsname, pem) - batch.put(DsInfoName(name), JSON.stringify(keyInfo)) - batch.commit((err) => { - if (err) return _error(callback, err) - callback(null, keyInfo) + if (type === 'ed25519' || type === 'secp256k1') { + const keypairMarshal = keypair.bytes + self._storeKey(name, kid, keypairMarshal, dsname, callback) + } else { + keypair.export(this._(), (err, pem) => { + if (err) return _error(callback, err) + self._storeKey(name, kid, pem, dsname, callback) }) - }) + } }) }) }) @@ -365,76 +361,85 @@ class Keychain { } /** - * Export an existing key as a PEM encrypted PKCS #8 string + * Export an existing key. 
+ * If it's as an RSA key, include a password to export as a PEM encrypted PKCS #8 string * * @param {string} name - The local key name; must already exist. - * @param {string} password - The password + * @param {string} password - The password, for RSA keys (optional) * @param {function(Error, string)} callback * @returns {undefined} */ exportKey (name, password, callback) { + if (typeof password === 'function' && typeof callback === 'undefined') { + callback = password + password = undefined + } if (!validateKeyName(name)) { return _error(callback, `Invalid key name '${name}'`) } - if (!password) { - return _error(callback, 'Password is required') - } const dsname = DsName(name) this.store.get(dsname, (err, res) => { if (err) { return _error(callback, `Key '${name}' does not exist. ${err.message}`) } - const pem = res.toString() - crypto.keys.import(pem, this._(), (err, privateKey) => { - if (err) return _error(callback, err) - privateKey.export(password, callback) - }) + if (password) { + const encKey = res.toString() + crypto.keys.import(encKey, this._(), (err, privateKey) => { + if (err) return _error(callback, err) + privateKey.export(password, callback) + }) + } else { + crypto.keys.unmarshalPrivateKey(res, callback) + } }) } /** - * Import a new key from a PEM encoded PKCS #8 string + * Import a new key + * If it's as an RSA key, include a password to import from a PEM encrypted PKCS #8 string * * @param {string} name - The local key name; must not already exist. - * @param {string} pem - The PEM encoded PKCS #8 string - * @param {string} password - The password. + * @param {string} encKey - The encoded key. If it's an RSA key, it needs to be a PEM encoded PKCS #8 string + * @param {string} password - The password for RSA keys. (optional) * @param {function(Error, KeyInfo)} callback * @returns {undefined} */ - importKey (name, pem, password, callback) { + importKey (name, encKey, password, callback) { const self = this + if (typeof password === 'function' && typeof callback === 'undefined') { + callback = password + password = undefined + } if (!validateKeyName(name) || name === 'self') { return _error(callback, `Invalid key name '${name}'`) } - if (!pem) { - return _error(callback, 'PEM encoded key is required') + if (!encKey) { + return _error(callback, 'The encoded key is required') } + const dsname = DsName(name) self.store.has(dsname, (err, exists) => { if (err) return _error(callback, err) if (exists) return _error(callback, `Key '${name}' already exists`) - crypto.keys.import(pem, password, (err, privateKey) => { - if (err) return _error(callback, 'Cannot read the key, most likely the password is wrong') - privateKey.id((err, kid) => { - if (err) return _error(callback, err) - privateKey.export(this._(), (err, pem) => { + + if (password) { + crypto.keys.import(encKey, password, (err, privateKey) => { + if (err) return _error(callback, 'Cannot read the key, most likely the password is wrong') + privateKey.id((err, kid) => { if (err) return _error(callback, err) - const keyInfo = { - name: name, - id: kid - } - const batch = self.store.batch() - batch.put(dsname, pem) - batch.put(DsInfoName(name), JSON.stringify(keyInfo)) - batch.commit((err) => { + privateKey.export(this._(), (err, pem) => { if (err) return _error(callback, err) - - callback(null, keyInfo) + self._storeKey(name, kid, pem, dsname, callback) }) }) }) - }) + } else { + encKey.id((err, kid) => { + if (err) return _error(callback, err) + self._storeKey(name, kid, encKey.bytes, dsname, callback) + }) + } }) } @@ 
-457,23 +462,28 @@ class Keychain { if (err) return _error(callback, err) privateKey.export(this._(), (err, pem) => { if (err) return _error(callback, err) - const keyInfo = { - name: name, - id: kid - } - const batch = self.store.batch() - batch.put(dsname, pem) - batch.put(DsInfoName(name), JSON.stringify(keyInfo)) - batch.commit((err) => { - if (err) return _error(callback, err) - - callback(null, keyInfo) - }) + self._storeKey(name, kid, pem, dsname, callback) }) }) }) } + _storeKey (name, kid, encKey, dsname, callback) { + const self = this + const keyInfo = { + name: name, + id: kid + } + const batch = self.store.batch() + batch.put(dsname, encKey) + batch.put(DsInfoName(name), JSON.stringify(keyInfo)) + batch.commit((err) => { + if (err) return _error(callback, err) + + callback(null, keyInfo) + }) + } + /** * Gets the private key as PEM encoded PKCS #8 string. * diff --git a/test/keychain.spec.js b/test/keychain.spec.js index ed6f1a80f0..9e3c6dc6ab 100644 --- a/test/keychain.spec.js +++ b/test/keychain.spec.js @@ -13,9 +13,11 @@ const PeerId = require('peer-id') module.exports = (datastore1, datastore2) => { describe('keychain', () => { const passPhrase = 'this is not a secure phrase' - const rsaKeyName = 'tajné jméno' - const renamedRsaKeyName = 'ชื่อลับ' - let rsaKeyInfo + const keyName = 'tajné jméno' + const renamedKeyName = 'ชื่อลับ' + let keyInfo + let ecKeyInfo + let secpKeyInfo let emptyKeystore let ks @@ -80,23 +82,43 @@ module.exports = (datastore1, datastore2) => { }) describe('key', () => { + it('can be an ed25519 key', function (done) { + this.timeout(50 * 1000) + ks.createKey(keyName + 'ed25519', 'ed25519', 2048, (err, info) => { + expect(err).to.not.exist() + expect(info).exist() + ecKeyInfo = info + done() + }) + }) + + it('can be an secp256k1 key', function (done) { + this.timeout(50 * 1000) + ks.createKey(keyName + 'secp256k1', 'secp256k1', 2048, (err, info) => { + expect(err).to.not.exist() + expect(info).exist() + secpKeyInfo = info + done() + }) + }) + it('can be an RSA key', function (done) { this.timeout(50 * 1000) - ks.createKey(rsaKeyName, 'rsa', 2048, (err, info) => { + ks.createKey(keyName, 'rsa', 2048, (err, info) => { expect(err).to.not.exist() expect(info).exist() - rsaKeyInfo = info + keyInfo = info done() }) }) it('has a name and id', () => { - expect(rsaKeyInfo).to.have.property('name', rsaKeyName) - expect(rsaKeyInfo).to.have.property('id') + expect(keyInfo).to.have.property('name', keyName) + expect(keyInfo).to.have.property('id') }) it('is encrypted PEM encoded PKCS #8', (done) => { - ks._getPrivateKey(rsaKeyName, (err, pem) => { + ks._getPrivateKey(keyName, (err, pem) => { expect(err).to.not.exist() expect(pem).to.startsWith('-----BEGIN ENCRYPTED PRIVATE KEY-----') done() @@ -104,7 +126,7 @@ module.exports = (datastore1, datastore2) => { }) it('does not overwrite existing key', (done) => { - ks.createKey(rsaKeyName, 'rsa', 2048, (err) => { + ks.createKey(keyName, 'rsa', 2048, (err) => { expect(err).to.exist() done() }) @@ -157,26 +179,26 @@ module.exports = (datastore1, datastore2) => { ks.listKeys((err, keys) => { expect(err).to.not.exist() expect(keys).to.exist() - const mykey = keys.find((k) => k.name.normalize() === rsaKeyName.normalize()) + const mykey = keys.find((k) => k.name.normalize() === keyName.normalize()) expect(mykey).to.exist() done() }) }) it('finds a key by name', (done) => { - ks.findKeyByName(rsaKeyName, (err, key) => { + ks.findKeyByName(keyName, (err, key) => { expect(err).to.not.exist() expect(key).to.exist() - 
expect(key).to.deep.equal(rsaKeyInfo) + expect(key).to.deep.equal(keyInfo) done() }) }) it('finds a key by id', (done) => { - ks.findKeyById(rsaKeyInfo.id, (err, key) => { + ks.findKeyById(keyInfo.id, (err, key) => { expect(err).to.not.exist() expect(key).to.exist() - expect(key).to.deep.equal(rsaKeyInfo) + expect(key).to.deep.equal(keyInfo) done() }) }) @@ -211,14 +233,14 @@ module.exports = (datastore1, datastore2) => { }) it('requires plain data as a Buffer', (done) => { - ks.cms.encrypt(rsaKeyName, 'plain data', (err, msg) => { + ks.cms.encrypt(keyName, 'plain data', (err, msg) => { expect(err).to.exist() done() }) }) it('encrypts', (done) => { - ks.cms.encrypt(rsaKeyName, plainData, (err, msg) => { + ks.cms.encrypt(keyName, plainData, (err, msg) => { expect(err).to.not.exist() expect(msg).to.exist() expect(msg).to.be.instanceOf(Buffer) @@ -245,7 +267,7 @@ module.exports = (datastore1, datastore2) => { emptyKeystore.cms.decrypt(cms, (err, plain) => { expect(err).to.exist() expect(err).to.have.property('missingKeys') - expect(err.missingKeys).to.eql([rsaKeyInfo.id]) + expect(err.missingKeys).to.eql([keyInfo.id]) done() }) }) @@ -262,9 +284,11 @@ module.exports = (datastore1, datastore2) => { describe('exported key', () => { let pemKey + let ed25519Key + let secp256k1Key it('is a PKCS #8 encrypted pem', (done) => { - ks.exportKey(rsaKeyName, 'password', (err, pem) => { + ks.exportKey(keyName, 'password', (err, pem) => { expect(err).to.not.exist() expect(pem).to.startsWith('-----BEGIN ENCRYPTED PRIVATE KEY-----') pemKey = pem @@ -276,13 +300,49 @@ module.exports = (datastore1, datastore2) => { ks.importKey('imported-key', pemKey, 'password', (err, key) => { expect(err).to.not.exist() expect(key.name).to.equal('imported-key') - expect(key.id).to.equal(rsaKeyInfo.id) + expect(key.id).to.equal(keyInfo.id) + done() + }) + }) + + it('can export ed25519 key', (done) => { + ks.exportKey(keyName + 'ed25519', (err, key) => { + expect(err).to.not.exist() + ed25519Key = key + expect(key).to.exist() + done() + }) + }) + + it('ed25519 key can be imported', (done) => { + ks.importKey('imported-key-ed25199', ed25519Key, (err, key) => { + expect(err).to.not.exist() + expect(key.name).to.equal('imported-key-ed25199') + expect(key.id).to.equal(ecKeyInfo.id) + done() + }) + }) + + it('can export secp256k1 key', (done) => { + ks.exportKey(keyName + 'secp256k1', (err, key) => { + expect(err).to.not.exist() + secp256k1Key = key + expect(key).to.exist() + done() + }) + }) + + it('secp256k1 key can be imported', (done) => { + ks.importKey('imported-key-secp256k1', secp256k1Key, (err, key) => { + expect(err).to.not.exist() + expect(key.name).to.equal('imported-key-secp256k1') + expect(key.id).to.equal(secpKeyInfo.id) done() }) }) it('cannot be imported as an existing key name', (done) => { - ks.importKey(rsaKeyName, pemKey, 'password', (err, key) => { + ks.importKey(keyName, pemKey, 'password', (err, key) => { expect(err).to.exist() done() }) @@ -342,40 +402,40 @@ module.exports = (datastore1, datastore2) => { describe('rename', () => { it('requires an existing key name', (done) => { - ks.renameKey('not-there', renamedRsaKeyName, (err) => { + ks.renameKey('not-there', renamedKeyName, (err) => { expect(err).to.exist() done() }) }) it('requires a valid new key name', (done) => { - ks.renameKey(rsaKeyName, '..\not-valid', (err) => { + ks.renameKey(keyName, '..\not-valid', (err) => { expect(err).to.exist() done() }) }) it('does not overwrite existing key', (done) => { - ks.renameKey(rsaKeyName, rsaKeyName, (err) => { 
+ ks.renameKey(keyName, keyName, (err) => { expect(err).to.exist() done() }) }) it('cannot create the "self" key', (done) => { - ks.renameKey(rsaKeyName, 'self', (err) => { + ks.renameKey(keyName, 'self', (err) => { expect(err).to.exist() done() }) }) it('removes the existing key name', (done) => { - ks.renameKey(rsaKeyName, renamedRsaKeyName, (err, key) => { + ks.renameKey(keyName, renamedKeyName, (err, key) => { expect(err).to.not.exist() expect(key).to.exist() - expect(key).to.have.property('name', renamedRsaKeyName) - expect(key).to.have.property('id', rsaKeyInfo.id) - ks.findKeyByName(rsaKeyName, (err, key) => { + expect(key).to.have.property('name', renamedKeyName) + expect(key).to.have.property('id', keyInfo.id) + ks.findKeyByName(keyName, (err, key) => { expect(err).to.exist() done() }) @@ -383,20 +443,20 @@ module.exports = (datastore1, datastore2) => { }) it('creates the new key name', (done) => { - ks.findKeyByName(renamedRsaKeyName, (err, key) => { + ks.findKeyByName(renamedKeyName, (err, key) => { expect(err).to.not.exist() expect(key).to.exist() - expect(key).to.have.property('name', renamedRsaKeyName) + expect(key).to.have.property('name', renamedKeyName) done() }) }) it('does not change the key ID', (done) => { - ks.findKeyByName(renamedRsaKeyName, (err, key) => { + ks.findKeyByName(renamedKeyName, (err, key) => { expect(err).to.not.exist() expect(key).to.exist() - expect(key).to.have.property('name', renamedRsaKeyName) - expect(key).to.have.property('id', rsaKeyInfo.id) + expect(key).to.have.property('name', renamedKeyName) + expect(key).to.have.property('id', keyInfo.id) done() }) }) @@ -418,11 +478,11 @@ module.exports = (datastore1, datastore2) => { }) it('can remove a known key', (done) => { - ks.removeKey(renamedRsaKeyName, (err, key) => { + ks.removeKey(renamedKeyName, (err, key) => { expect(err).to.not.exist() expect(key).to.exist() - expect(key).to.have.property('name', renamedRsaKeyName) - expect(key).to.have.property('id', rsaKeyInfo.id) + expect(key).to.have.property('name', renamedKeyName) + expect(key).to.have.property('id', keyInfo.id) done() }) }) From 217cfd3de897afa375dcc5bd83db135a3c4c8bf9 Mon Sep 17 00:00:00 2001 From: Alberto Elias Date: Tue, 26 Feb 2019 12:22:59 +0100 Subject: [PATCH 059/102] chore: update libp2p-crypto (#33) --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 9055b7cc48..11333071b2 100644 --- a/package.json +++ b/package.json @@ -44,7 +44,7 @@ "dependencies": { "async": "^2.6.1", "interface-datastore": "~0.6.0", - "libp2p-crypto": "~0.16.0", + "libp2p-crypto": "~0.16.1", "merge-options": "^1.0.1", "node-forge": "~0.7.6", "pull-stream": "^3.6.8", From 267002f646f41d7a9d801ffb771e3d9963baf56d Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Tue, 26 Feb 2019 11:39:49 +0000 Subject: [PATCH 060/102] chore: update contributors --- package.json | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 11333071b2..21b2a6fdec 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "libp2p-keychain", - "version": "0.3.6", + "version": "0.4.0", "description": "Key management and cryptographically protected messages", "leadMaintainer": "Vasco Santos ", "main": "src/index.js", @@ -65,13 +65,14 @@ }, "contributors": [ "Alan Shaw ", + "Alberto Elias ", "David Dias ", "Hugo Dias ", "Maciej Krüger ", "Masahiro Saito ", "Richard Schneider ", - "Vasco Santos ", "Vasco Santos ", + "Vasco Santos ", "Victor Bjelkholm " ] } From 
e30330e1a0e179d70eb2c082c0a955bf1b4847cf Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Tue, 26 Feb 2019 11:39:50 +0000 Subject: [PATCH 061/102] chore: release version v0.4.0 --- CHANGELOG.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5f9af60949..7ad54ab284 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,13 @@ + +# [0.4.0](https://github.com/libp2p/js-libp2p-keychain/compare/v0.3.6...v0.4.0) (2019-02-26) + + +### Features + +* adds support for ed25199 and secp256k1 ([#31](https://github.com/libp2p/js-libp2p-keychain/issues/31)) ([9eb11f4](https://github.com/libp2p/js-libp2p-keychain/commit/9eb11f4)) + + + ## [0.3.6](https://github.com/libp2p/js-libp2p-keychain/compare/v0.3.5...v0.3.6) (2019-01-10) From f71a6bbb0a044f6325bae3d1ea10b6b1ceda8c9f Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Thu, 14 Mar 2019 22:26:07 +0000 Subject: [PATCH 062/102] Revert "feat: adds support for ed25199 and secp256k1 (#31)" This reverts commit 9eb11f42452b4266db0c19de3310e809a0087669. --- README.md | 6 +- src/keychain.js | 128 +++++++++++++++++++--------------------- test/keychain.spec.js | 132 ++++++++++++------------------------------ 3 files changed, 98 insertions(+), 168 deletions(-) diff --git a/README.md b/README.md index 655ff03d2d..24bcf0d667 100644 --- a/README.md +++ b/README.md @@ -57,8 +57,8 @@ Managing a key - `createKey (name, type, size, callback)` - `renameKey (oldName, newName, callback)` - `removeKey (name, callback)` -- `exportKey (name, password, callback)` // Omit _password_ for `ed25199` or `secp256k1` keys -- `importKey (name, encKey, password, callback)` // Omit _password_ for `ed25199` or `secp256k1` keys +- `exportKey (name, password, callback)` +- `importKey (name, pem, password, callback)` - `importPeer (name, peer, callback)` A naming service for a key @@ -67,7 +67,7 @@ A naming service for a key - `findKeyById (id, callback)` - `findKeyByName (name, callback)` -Cryptographically protected messages (Only supported with RSA keys) +Cryptographically protected messages - `cms.encrypt (name, plain, callback)` - `cms.decrypt (cmsData, callback)` diff --git a/src/keychain.js b/src/keychain.js index e236d8a860..cecc3207e7 100644 --- a/src/keychain.js +++ b/src/keychain.js @@ -20,7 +20,7 @@ const NIST = { } const defaultOptions = { - // See https://cryptosense.com/blog/parameter-choice-for-pbkdf2/ + // See https://cryptosense.com/parametesr-choice-for-pbkdf2/ dek: { keyLength: 512 / 8, iterationCount: 10000, @@ -197,8 +197,7 @@ class Keychain { if (err) return _error(callback, err) if (exists) return _error(callback, `Key '${name}' already exists`) - type = type.toLowerCase() - switch (type) { + switch (type.toLowerCase()) { case 'rsa': if (size < 2048) { return _error(callback, `Invalid RSA key size ${size}`) @@ -212,16 +211,21 @@ class Keychain { if (err) return _error(callback, err) keypair.id((err, kid) => { if (err) return _error(callback, err) - - if (type === 'ed25519' || type === 'secp256k1') { - const keypairMarshal = keypair.bytes - self._storeKey(name, kid, keypairMarshal, dsname, callback) - } else { - keypair.export(this._(), (err, pem) => { + keypair.export(this._(), (err, pem) => { + if (err) return _error(callback, err) + const keyInfo = { + name: name, + id: kid + } + const batch = self.store.batch() + batch.put(dsname, pem) + batch.put(DsInfoName(name), JSON.stringify(keyInfo)) + batch.commit((err) => { if (err) return _error(callback, err) - self._storeKey(name, kid, pem, dsname, callback) + + 
callback(null, keyInfo) }) - } + }) }) }) }) @@ -361,85 +365,76 @@ class Keychain { } /** - * Export an existing key. - * If it's as an RSA key, include a password to export as a PEM encrypted PKCS #8 string + * Export an existing key as a PEM encrypted PKCS #8 string * * @param {string} name - The local key name; must already exist. - * @param {string} password - The password, for RSA keys (optional) + * @param {string} password - The password * @param {function(Error, string)} callback * @returns {undefined} */ exportKey (name, password, callback) { - if (typeof password === 'function' && typeof callback === 'undefined') { - callback = password - password = undefined - } if (!validateKeyName(name)) { return _error(callback, `Invalid key name '${name}'`) } + if (!password) { + return _error(callback, 'Password is required') + } const dsname = DsName(name) this.store.get(dsname, (err, res) => { if (err) { return _error(callback, `Key '${name}' does not exist. ${err.message}`) } - if (password) { - const encKey = res.toString() - crypto.keys.import(encKey, this._(), (err, privateKey) => { - if (err) return _error(callback, err) - privateKey.export(password, callback) - }) - } else { - crypto.keys.unmarshalPrivateKey(res, callback) - } + const pem = res.toString() + crypto.keys.import(pem, this._(), (err, privateKey) => { + if (err) return _error(callback, err) + privateKey.export(password, callback) + }) }) } /** - * Import a new key - * If it's as an RSA key, include a password to import from a PEM encrypted PKCS #8 string + * Import a new key from a PEM encoded PKCS #8 string * * @param {string} name - The local key name; must not already exist. - * @param {string} encKey - The encoded key. If it's an RSA key, it needs to be a PEM encoded PKCS #8 string - * @param {string} password - The password for RSA keys. (optional) + * @param {string} pem - The PEM encoded PKCS #8 string + * @param {string} password - The password. 
* @param {function(Error, KeyInfo)} callback * @returns {undefined} */ - importKey (name, encKey, password, callback) { + importKey (name, pem, password, callback) { const self = this - if (typeof password === 'function' && typeof callback === 'undefined') { - callback = password - password = undefined - } if (!validateKeyName(name) || name === 'self') { return _error(callback, `Invalid key name '${name}'`) } - if (!encKey) { - return _error(callback, 'The encoded key is required') + if (!pem) { + return _error(callback, 'PEM encoded key is required') } - const dsname = DsName(name) self.store.has(dsname, (err, exists) => { if (err) return _error(callback, err) if (exists) return _error(callback, `Key '${name}' already exists`) - - if (password) { - crypto.keys.import(encKey, password, (err, privateKey) => { - if (err) return _error(callback, 'Cannot read the key, most likely the password is wrong') - privateKey.id((err, kid) => { + crypto.keys.import(pem, password, (err, privateKey) => { + if (err) return _error(callback, 'Cannot read the key, most likely the password is wrong') + privateKey.id((err, kid) => { + if (err) return _error(callback, err) + privateKey.export(this._(), (err, pem) => { if (err) return _error(callback, err) - privateKey.export(this._(), (err, pem) => { + const keyInfo = { + name: name, + id: kid + } + const batch = self.store.batch() + batch.put(dsname, pem) + batch.put(DsInfoName(name), JSON.stringify(keyInfo)) + batch.commit((err) => { if (err) return _error(callback, err) - self._storeKey(name, kid, pem, dsname, callback) + + callback(null, keyInfo) }) }) }) - } else { - encKey.id((err, kid) => { - if (err) return _error(callback, err) - self._storeKey(name, kid, encKey.bytes, dsname, callback) - }) - } + }) }) } @@ -462,28 +457,23 @@ class Keychain { if (err) return _error(callback, err) privateKey.export(this._(), (err, pem) => { if (err) return _error(callback, err) - self._storeKey(name, kid, pem, dsname, callback) + const keyInfo = { + name: name, + id: kid + } + const batch = self.store.batch() + batch.put(dsname, pem) + batch.put(DsInfoName(name), JSON.stringify(keyInfo)) + batch.commit((err) => { + if (err) return _error(callback, err) + + callback(null, keyInfo) + }) }) }) }) } - _storeKey (name, kid, encKey, dsname, callback) { - const self = this - const keyInfo = { - name: name, - id: kid - } - const batch = self.store.batch() - batch.put(dsname, encKey) - batch.put(DsInfoName(name), JSON.stringify(keyInfo)) - batch.commit((err) => { - if (err) return _error(callback, err) - - callback(null, keyInfo) - }) - } - /** * Gets the private key as PEM encoded PKCS #8 string. 
* diff --git a/test/keychain.spec.js b/test/keychain.spec.js index 9e3c6dc6ab..ed6f1a80f0 100644 --- a/test/keychain.spec.js +++ b/test/keychain.spec.js @@ -13,11 +13,9 @@ const PeerId = require('peer-id') module.exports = (datastore1, datastore2) => { describe('keychain', () => { const passPhrase = 'this is not a secure phrase' - const keyName = 'tajné jméno' - const renamedKeyName = 'ชื่อลับ' - let keyInfo - let ecKeyInfo - let secpKeyInfo + const rsaKeyName = 'tajné jméno' + const renamedRsaKeyName = 'ชื่อลับ' + let rsaKeyInfo let emptyKeystore let ks @@ -82,43 +80,23 @@ module.exports = (datastore1, datastore2) => { }) describe('key', () => { - it('can be an ed25519 key', function (done) { - this.timeout(50 * 1000) - ks.createKey(keyName + 'ed25519', 'ed25519', 2048, (err, info) => { - expect(err).to.not.exist() - expect(info).exist() - ecKeyInfo = info - done() - }) - }) - - it('can be an secp256k1 key', function (done) { - this.timeout(50 * 1000) - ks.createKey(keyName + 'secp256k1', 'secp256k1', 2048, (err, info) => { - expect(err).to.not.exist() - expect(info).exist() - secpKeyInfo = info - done() - }) - }) - it('can be an RSA key', function (done) { this.timeout(50 * 1000) - ks.createKey(keyName, 'rsa', 2048, (err, info) => { + ks.createKey(rsaKeyName, 'rsa', 2048, (err, info) => { expect(err).to.not.exist() expect(info).exist() - keyInfo = info + rsaKeyInfo = info done() }) }) it('has a name and id', () => { - expect(keyInfo).to.have.property('name', keyName) - expect(keyInfo).to.have.property('id') + expect(rsaKeyInfo).to.have.property('name', rsaKeyName) + expect(rsaKeyInfo).to.have.property('id') }) it('is encrypted PEM encoded PKCS #8', (done) => { - ks._getPrivateKey(keyName, (err, pem) => { + ks._getPrivateKey(rsaKeyName, (err, pem) => { expect(err).to.not.exist() expect(pem).to.startsWith('-----BEGIN ENCRYPTED PRIVATE KEY-----') done() @@ -126,7 +104,7 @@ module.exports = (datastore1, datastore2) => { }) it('does not overwrite existing key', (done) => { - ks.createKey(keyName, 'rsa', 2048, (err) => { + ks.createKey(rsaKeyName, 'rsa', 2048, (err) => { expect(err).to.exist() done() }) @@ -179,26 +157,26 @@ module.exports = (datastore1, datastore2) => { ks.listKeys((err, keys) => { expect(err).to.not.exist() expect(keys).to.exist() - const mykey = keys.find((k) => k.name.normalize() === keyName.normalize()) + const mykey = keys.find((k) => k.name.normalize() === rsaKeyName.normalize()) expect(mykey).to.exist() done() }) }) it('finds a key by name', (done) => { - ks.findKeyByName(keyName, (err, key) => { + ks.findKeyByName(rsaKeyName, (err, key) => { expect(err).to.not.exist() expect(key).to.exist() - expect(key).to.deep.equal(keyInfo) + expect(key).to.deep.equal(rsaKeyInfo) done() }) }) it('finds a key by id', (done) => { - ks.findKeyById(keyInfo.id, (err, key) => { + ks.findKeyById(rsaKeyInfo.id, (err, key) => { expect(err).to.not.exist() expect(key).to.exist() - expect(key).to.deep.equal(keyInfo) + expect(key).to.deep.equal(rsaKeyInfo) done() }) }) @@ -233,14 +211,14 @@ module.exports = (datastore1, datastore2) => { }) it('requires plain data as a Buffer', (done) => { - ks.cms.encrypt(keyName, 'plain data', (err, msg) => { + ks.cms.encrypt(rsaKeyName, 'plain data', (err, msg) => { expect(err).to.exist() done() }) }) it('encrypts', (done) => { - ks.cms.encrypt(keyName, plainData, (err, msg) => { + ks.cms.encrypt(rsaKeyName, plainData, (err, msg) => { expect(err).to.not.exist() expect(msg).to.exist() expect(msg).to.be.instanceOf(Buffer) @@ -267,7 +245,7 @@ module.exports = 
(datastore1, datastore2) => { emptyKeystore.cms.decrypt(cms, (err, plain) => { expect(err).to.exist() expect(err).to.have.property('missingKeys') - expect(err.missingKeys).to.eql([keyInfo.id]) + expect(err.missingKeys).to.eql([rsaKeyInfo.id]) done() }) }) @@ -284,11 +262,9 @@ module.exports = (datastore1, datastore2) => { describe('exported key', () => { let pemKey - let ed25519Key - let secp256k1Key it('is a PKCS #8 encrypted pem', (done) => { - ks.exportKey(keyName, 'password', (err, pem) => { + ks.exportKey(rsaKeyName, 'password', (err, pem) => { expect(err).to.not.exist() expect(pem).to.startsWith('-----BEGIN ENCRYPTED PRIVATE KEY-----') pemKey = pem @@ -300,49 +276,13 @@ module.exports = (datastore1, datastore2) => { ks.importKey('imported-key', pemKey, 'password', (err, key) => { expect(err).to.not.exist() expect(key.name).to.equal('imported-key') - expect(key.id).to.equal(keyInfo.id) - done() - }) - }) - - it('can export ed25519 key', (done) => { - ks.exportKey(keyName + 'ed25519', (err, key) => { - expect(err).to.not.exist() - ed25519Key = key - expect(key).to.exist() - done() - }) - }) - - it('ed25519 key can be imported', (done) => { - ks.importKey('imported-key-ed25199', ed25519Key, (err, key) => { - expect(err).to.not.exist() - expect(key.name).to.equal('imported-key-ed25199') - expect(key.id).to.equal(ecKeyInfo.id) - done() - }) - }) - - it('can export secp256k1 key', (done) => { - ks.exportKey(keyName + 'secp256k1', (err, key) => { - expect(err).to.not.exist() - secp256k1Key = key - expect(key).to.exist() - done() - }) - }) - - it('secp256k1 key can be imported', (done) => { - ks.importKey('imported-key-secp256k1', secp256k1Key, (err, key) => { - expect(err).to.not.exist() - expect(key.name).to.equal('imported-key-secp256k1') - expect(key.id).to.equal(secpKeyInfo.id) + expect(key.id).to.equal(rsaKeyInfo.id) done() }) }) it('cannot be imported as an existing key name', (done) => { - ks.importKey(keyName, pemKey, 'password', (err, key) => { + ks.importKey(rsaKeyName, pemKey, 'password', (err, key) => { expect(err).to.exist() done() }) @@ -402,40 +342,40 @@ module.exports = (datastore1, datastore2) => { describe('rename', () => { it('requires an existing key name', (done) => { - ks.renameKey('not-there', renamedKeyName, (err) => { + ks.renameKey('not-there', renamedRsaKeyName, (err) => { expect(err).to.exist() done() }) }) it('requires a valid new key name', (done) => { - ks.renameKey(keyName, '..\not-valid', (err) => { + ks.renameKey(rsaKeyName, '..\not-valid', (err) => { expect(err).to.exist() done() }) }) it('does not overwrite existing key', (done) => { - ks.renameKey(keyName, keyName, (err) => { + ks.renameKey(rsaKeyName, rsaKeyName, (err) => { expect(err).to.exist() done() }) }) it('cannot create the "self" key', (done) => { - ks.renameKey(keyName, 'self', (err) => { + ks.renameKey(rsaKeyName, 'self', (err) => { expect(err).to.exist() done() }) }) it('removes the existing key name', (done) => { - ks.renameKey(keyName, renamedKeyName, (err, key) => { + ks.renameKey(rsaKeyName, renamedRsaKeyName, (err, key) => { expect(err).to.not.exist() expect(key).to.exist() - expect(key).to.have.property('name', renamedKeyName) - expect(key).to.have.property('id', keyInfo.id) - ks.findKeyByName(keyName, (err, key) => { + expect(key).to.have.property('name', renamedRsaKeyName) + expect(key).to.have.property('id', rsaKeyInfo.id) + ks.findKeyByName(rsaKeyName, (err, key) => { expect(err).to.exist() done() }) @@ -443,20 +383,20 @@ module.exports = (datastore1, datastore2) => { }) it('creates 
the new key name', (done) => { - ks.findKeyByName(renamedKeyName, (err, key) => { + ks.findKeyByName(renamedRsaKeyName, (err, key) => { expect(err).to.not.exist() expect(key).to.exist() - expect(key).to.have.property('name', renamedKeyName) + expect(key).to.have.property('name', renamedRsaKeyName) done() }) }) it('does not change the key ID', (done) => { - ks.findKeyByName(renamedKeyName, (err, key) => { + ks.findKeyByName(renamedRsaKeyName, (err, key) => { expect(err).to.not.exist() expect(key).to.exist() - expect(key).to.have.property('name', renamedKeyName) - expect(key).to.have.property('id', keyInfo.id) + expect(key).to.have.property('name', renamedRsaKeyName) + expect(key).to.have.property('id', rsaKeyInfo.id) done() }) }) @@ -478,11 +418,11 @@ module.exports = (datastore1, datastore2) => { }) it('can remove a known key', (done) => { - ks.removeKey(renamedKeyName, (err, key) => { + ks.removeKey(renamedRsaKeyName, (err, key) => { expect(err).to.not.exist() expect(key).to.exist() - expect(key).to.have.property('name', renamedKeyName) - expect(key).to.have.property('id', keyInfo.id) + expect(key).to.have.property('name', renamedRsaKeyName) + expect(key).to.have.property('id', rsaKeyInfo.id) done() }) }) From 4e4d3d4b6f25e28a7bb92f9d777d8bdde6f8e1d9 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Thu, 14 Mar 2019 22:38:45 +0000 Subject: [PATCH 063/102] chore: update contributors --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 21b2a6fdec..97176ef9dd 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "libp2p-keychain", - "version": "0.4.0", + "version": "0.4.1", "description": "Key management and cryptographically protected messages", "leadMaintainer": "Vasco Santos ", "main": "src/index.js", @@ -71,8 +71,8 @@ "Maciej Krüger ", "Masahiro Saito ", "Richard Schneider ", - "Vasco Santos ", "Vasco Santos ", + "Vasco Santos ", "Victor Bjelkholm " ] } From a5fd967c028efc54effa92f597918fc98c462320 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Thu, 14 Mar 2019 22:38:45 +0000 Subject: [PATCH 064/102] chore: release version v0.4.1 --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7ad54ab284..7df7adbdf5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ + +## [0.4.1](https://github.com/libp2p/js-libp2p-keychain/compare/v0.4.0...v0.4.1) (2019-03-14) + + + # [0.4.0](https://github.com/libp2p/js-libp2p-keychain/compare/v0.3.6...v0.4.0) (2019-02-26) From ef4737494115d6bb4f75eada0df4eac449f4daeb Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Thu, 11 Apr 2019 11:20:18 +0100 Subject: [PATCH 065/102] chore: add discourse badge (#34) --- README.md | 1 + package.json | 12 ++++++------ 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 24bcf0d667..4ff4abe68a 100644 --- a/README.md +++ b/README.md @@ -3,6 +3,7 @@ [![](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://ipn.io) [![](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](http://ipfs.io/) [![](https://img.shields.io/badge/freenode-%23ipfs-blue.svg?style=flat-square)](http://webchat.freenode.net/?channels=%23ipfs) +[![Discourse posts](https://img.shields.io/discourse/https/discuss.libp2p.io/posts.svg)](https://discuss.libp2p.io) [![standard-readme compliant](https://img.shields.io/badge/standard--readme-OK-green.svg?style=flat-square)](https://github.com/RichardLitt/standard-readme) 
[![Coverage Status](https://coveralls.io/repos/github/libp2p/js-libp2p-keychain/badge.svg?branch=master)](https://coveralls.io/github/libp2p/js-libp2p-keychain?branch=master) [![Travis CI](https://travis-ci.org/libp2p/js-libp2p-keychain.svg?branch=master)](https://travis-ci.org/libp2p/js-libp2p-keychain) diff --git a/package.json b/package.json index 97176ef9dd..e048791a31 100644 --- a/package.json +++ b/package.json @@ -42,22 +42,22 @@ }, "homepage": "https://github.com/libp2p/js-libp2p-keychain#readme", "dependencies": { - "async": "^2.6.1", + "async": "^2.6.2", "interface-datastore": "~0.6.0", "libp2p-crypto": "~0.16.1", "merge-options": "^1.0.1", "node-forge": "~0.7.6", - "pull-stream": "^3.6.8", + "pull-stream": "^3.6.9", "sanitize-filename": "^1.6.1" }, "devDependencies": { - "aegir": "^18.0.3", + "aegir": "^18.2.1", "chai": "^4.2.0", - "chai-string": "^1.4.0", - "datastore-fs": "~0.7.0", + "chai-string": "^1.5.0", + "datastore-fs": "~0.8.0", "datastore-level": "~0.10.0", "dirty-chai": "^2.0.1", - "level-js": "^4.0.0", + "level-js": "^4.0.1", "mocha": "^5.2.0", "multihashes": "~0.4.14", "peer-id": "~0.12.2", From 7051b9c530821e1fd21e28fffee5a17f1e2d46e9 Mon Sep 17 00:00:00 2001 From: Alex Potsides Date: Thu, 13 Jun 2019 14:35:12 +0100 Subject: [PATCH 066/102] fix: throw errors with correct stack trace (#35) The stack trace of thrown error objects is created when the object is instantiated - if we defer to a function to create the error we end up with misleading stack traces. This PR instantiates errors where errors occur and also uses the `err-code` module to add a `.code` property so we don't have to depend on string error messages for the type of error that was thrown. --- package.json | 1 + src/cms.js | 14 +++++++------ src/keychain.js | 49 ++++++++++++++++++++++--------------------- test/keychain.spec.js | 14 +++++++++++++ 4 files changed, 48 insertions(+), 30 deletions(-) diff --git a/package.json b/package.json index e048791a31..7521e6d006 100644 --- a/package.json +++ b/package.json @@ -43,6 +43,7 @@ "homepage": "https://github.com/libp2p/js-libp2p-keychain#readme", "dependencies": { "async": "^2.6.2", + "err-code": "^1.1.2", "interface-datastore": "~0.6.0", "libp2p-crypto": "~0.16.1", "merge-options": "^1.0.1", diff --git a/src/cms.js b/src/cms.js index 90d7d85fdc..d086407ef8 100644 --- a/src/cms.js +++ b/src/cms.js @@ -8,6 +8,7 @@ require('node-forge/lib/pkcs7') require('node-forge/lib/pbe') const forge = require('node-forge/lib/forge') const util = require('./util') +const errcode = require('err-code') /** * Cryptographic Message Syntax (aka PKCS #7) @@ -26,7 +27,7 @@ class CMS { */ constructor (keychain) { if (!keychain) { - throw new Error('keychain is required') + throw errcode(new Error('keychain is required'), 'ERR_KEYCHAIN_REQUIRED') } this.keychain = keychain @@ -47,7 +48,7 @@ class CMS { const done = (err, result) => setImmediate(() => callback(err, result)) if (!Buffer.isBuffer(plain)) { - return done(new Error('Plain data must be a Buffer')) + return done(errcode(new Error('Plain data must be a Buffer'), 'ERR_INVALID_PARAMS')) } series([ @@ -93,7 +94,7 @@ class CMS { const done = (err, result) => setImmediate(() => callback(err, result)) if (!Buffer.isBuffer(cmsData)) { - return done(new Error('CMS data is required')) + return done(errcode(new Error('CMS data is required'), 'ERR_INVALID_PARAMS')) } const self = this @@ -103,7 +104,7 @@ class CMS { const obj = forge.asn1.fromDer(buf) cms = forge.pkcs7.messageFromAsn1(obj) } catch (err) { - return done(new 
Error('Invalid CMS: ' + err.message)) + return done(errcode(new Error('Invalid CMS: ' + err.message), 'ERR_INVALID_CMS')) } // Find a recipient whose key we hold. We only deal with recipient certs @@ -124,8 +125,9 @@ class CMS { if (err) return done(err) if (!r) { const missingKeys = recipients.map(r => r.keyId) - err = new Error('Decryption needs one of the key(s): ' + missingKeys.join(', ')) - err.missingKeys = missingKeys + err = errcode(new Error('Decryption needs one of the key(s): ' + missingKeys.join(', ')), 'ERR_MISSING_KEYS', { + missingKeys + }) return done(err) } diff --git a/src/keychain.js b/src/keychain.js index cecc3207e7..f8f8889556 100644 --- a/src/keychain.js +++ b/src/keychain.js @@ -8,6 +8,7 @@ const DS = require('interface-datastore') const collect = require('pull-stream/sinks/collect') const pull = require('pull-stream/pull') const CMS = require('./cms') +const errcode = require('err-code') const keyPrefix = '/pkcs8/' const infoPrefix = '/info/' @@ -50,7 +51,7 @@ function _error (callback, err) { const min = 200 const max = 1000 const delay = Math.random() * (max - min) + min - if (typeof err === 'string') err = new Error(err) + setTimeout(callback, delay, err, null) } @@ -181,26 +182,26 @@ class Keychain { const self = this if (!validateKeyName(name) || name === 'self') { - return _error(callback, `Invalid key name '${name}'`) + return _error(callback, errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME')) } if (typeof type !== 'string') { - return _error(callback, `Invalid key type '${type}'`) + return _error(callback, errcode(new Error(`Invalid key type '${type}'`), 'ERR_INVALID_KEY_TYPE')) } if (!Number.isSafeInteger(size)) { - return _error(callback, `Invalid key size '${size}'`) + return _error(callback, errcode(new Error(`Invalid key size '${size}'`), 'ERR_INVALID_KEY_SIZE')) } const dsname = DsName(name) self.store.has(dsname, (err, exists) => { if (err) return _error(callback, err) - if (exists) return _error(callback, `Key '${name}' already exists`) + if (exists) return _error(callback, errcode(new Error(`Key '${name}' already exists`), 'ERR_KEY_ALREADY_EXISTS')) switch (type.toLowerCase()) { case 'rsa': if (size < 2048) { - return _error(callback, `Invalid RSA key size ${size}`) + return _error(callback, errcode(new Error(`Invalid RSA key size ${size}`), 'ERR_INVALID_KEY_SIZE')) } break default: @@ -278,13 +279,13 @@ class Keychain { */ findKeyByName (name, callback) { if (!validateKeyName(name)) { - return _error(callback, `Invalid key name '${name}'`) + return _error(callback, errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME')) } const dsname = DsInfoName(name) this.store.get(dsname, (err, res) => { if (err) { - return _error(callback, `Key '${name}' does not exist. ${err.message}`) + return _error(callback, errcode(new Error(`Key '${name}' does not exist. 
${err.message}`), 'ERR_KEY_NOT_FOUND')) } callback(null, JSON.parse(res.toString())) @@ -301,7 +302,7 @@ class Keychain { removeKey (name, callback) { const self = this if (!validateKeyName(name) || name === 'self') { - return _error(callback, `Invalid key name '${name}'`) + return _error(callback, errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME')) } const dsname = DsName(name) self.findKeyByName(name, (err, keyinfo) => { @@ -327,10 +328,10 @@ class Keychain { renameKey (oldName, newName, callback) { const self = this if (!validateKeyName(oldName) || oldName === 'self') { - return _error(callback, `Invalid old key name '${oldName}'`) + return _error(callback, errcode(new Error(`Invalid old key name '${oldName}'`), 'ERR_OLD_KEY_NAME_INVALID')) } if (!validateKeyName(newName) || newName === 'self') { - return _error(callback, `Invalid new key name '${newName}'`) + return _error(callback, errcode(new Error(`Invalid new key name '${newName}'`), 'ERR_NEW_KEY_NAME_INVALID')) } const oldDsname = DsName(oldName) const newDsname = DsName(newName) @@ -338,12 +339,12 @@ class Keychain { const newInfoName = DsInfoName(newName) this.store.get(oldDsname, (err, res) => { if (err) { - return _error(callback, `Key '${oldName}' does not exist. ${err.message}`) + return _error(callback, errcode(new Error(`Key '${oldName}' does not exist. ${err.message}`), 'ERR_KEY_NOT_FOUND')) } const pem = res.toString() self.store.has(newDsname, (err, exists) => { if (err) return _error(callback, err) - if (exists) return _error(callback, `Key '${newName}' already exists`) + if (exists) return _error(callback, errcode(new Error(`Key '${newName}' already exists`), 'ERR_KEY_ALREADY_EXISTS')) self.store.get(oldInfoName, (err, res) => { if (err) return _error(callback, err) @@ -374,16 +375,16 @@ class Keychain { */ exportKey (name, password, callback) { if (!validateKeyName(name)) { - return _error(callback, `Invalid key name '${name}'`) + return _error(callback, errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME')) } if (!password) { - return _error(callback, 'Password is required') + return _error(callback, errcode(new Error('Password is required'), 'ERR_PASSWORD_REQUIRED')) } const dsname = DsName(name) this.store.get(dsname, (err, res) => { if (err) { - return _error(callback, `Key '${name}' does not exist. ${err.message}`) + return _error(callback, errcode(new Error(`Key '${name}' does not exist. 
${err.message}`), 'ERR_KEY_NOT_FOUND')) } const pem = res.toString() crypto.keys.import(pem, this._(), (err, privateKey) => { @@ -405,7 +406,7 @@ class Keychain { importKey (name, pem, password, callback) { const self = this if (!validateKeyName(name) || name === 'self') { - return _error(callback, `Invalid key name '${name}'`) + return _error(callback, errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME')) } if (!pem) { return _error(callback, 'PEM encoded key is required') @@ -413,9 +414,9 @@ class Keychain { const dsname = DsName(name) self.store.has(dsname, (err, exists) => { if (err) return _error(callback, err) - if (exists) return _error(callback, `Key '${name}' already exists`) + if (exists) return _error(callback, errcode(new Error(`Key '${name}' already exists`), 'ERR_KEY_ALREADY_EXISTS')) crypto.keys.import(pem, password, (err, privateKey) => { - if (err) return _error(callback, 'Cannot read the key, most likely the password is wrong') + if (err) return _error(callback, errcode(new Error('Cannot read the key, most likely the password is wrong'), 'ERR_CANNOT_READ_KEY')) privateKey.id((err, kid) => { if (err) return _error(callback, err) privateKey.export(this._(), (err, pem) => { @@ -441,17 +442,17 @@ class Keychain { importPeer (name, peer, callback) { const self = this if (!validateKeyName(name)) { - return _error(callback, `Invalid key name '${name}'`) + return _error(callback, errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME')) } if (!peer || !peer.privKey) { - return _error(callback, 'Peer.privKey is required') + return _error(callback, errcode(new Error('Peer.privKey is required'), 'ERR_MISSING_PRIVATE_KEY')) } const privateKey = peer.privKey const dsname = DsName(name) self.store.has(dsname, (err, exists) => { if (err) return _error(callback, err) - if (exists) return _error(callback, `Key '${name}' already exists`) + if (exists) return _error(callback, errcode(new Error(`Key '${name}' already exists`), 'ERR_KEY_ALREADY_EXISTS')) privateKey.id((err, kid) => { if (err) return _error(callback, err) @@ -484,11 +485,11 @@ class Keychain { */ _getPrivateKey (name, callback) { if (!validateKeyName(name)) { - return _error(callback, `Invalid key name '${name}'`) + return _error(callback, errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME')) } this.store.get(DsName(name), (err, res) => { if (err) { - return _error(callback, `Key '${name}' does not exist. ${err.message}`) + return _error(callback, errcode(new Error(`Key '${name}' does not exist. 
${err.message}`), 'ERR_KEY_NOT_FOUND')) } callback(null, res.toString()) }) diff --git a/test/keychain.spec.js b/test/keychain.spec.js index ed6f1a80f0..bcaa6671d0 100644 --- a/test/keychain.spec.js +++ b/test/keychain.spec.js @@ -59,22 +59,27 @@ module.exports = (datastore1, datastore2) => { ks.removeKey('../../nasty', (err) => { expect(err).to.exist() expect(err).to.have.property('message', 'Invalid key name \'../../nasty\'') + expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') }) ks.removeKey('', (err) => { expect(err).to.exist() expect(err).to.have.property('message', 'Invalid key name \'\'') + expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') }) ks.removeKey(' ', (err) => { expect(err).to.exist() expect(err).to.have.property('message', 'Invalid key name \' \'') + expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') }) ks.removeKey(null, (err) => { expect(err).to.exist() expect(err).to.have.property('message', 'Invalid key name \'null\'') + expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') }) ks.removeKey(undefined, (err) => { expect(err).to.exist() expect(err).to.have.property('message', 'Invalid key name \'undefined\'') + expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') }) }) }) @@ -106,6 +111,7 @@ module.exports = (datastore1, datastore2) => { it('does not overwrite existing key', (done) => { ks.createKey(rsaKeyName, 'rsa', 2048, (err) => { expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_KEY_ALREADY_EXISTS') done() }) }) @@ -146,6 +152,7 @@ module.exports = (datastore1, datastore2) => { ks.createKey('bad-nist-rsa', 'rsa', 1024, (err) => { expect(err).to.exist() expect(err).to.have.property('message', 'Invalid RSA key size 1024') + expect(err).to.have.property('code', 'ERR_INVALID_KEY_SIZE') done() }) }) @@ -246,6 +253,7 @@ module.exports = (datastore1, datastore2) => { expect(err).to.exist() expect(err).to.have.property('missingKeys') expect(err.missingKeys).to.eql([rsaKeyInfo.id]) + expect(err).to.have.property('code', 'ERR_MISSING_KEYS') done() }) }) @@ -344,6 +352,7 @@ module.exports = (datastore1, datastore2) => { it('requires an existing key name', (done) => { ks.renameKey('not-there', renamedRsaKeyName, (err) => { expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_KEY_NOT_FOUND') done() }) }) @@ -351,6 +360,7 @@ module.exports = (datastore1, datastore2) => { it('requires a valid new key name', (done) => { ks.renameKey(rsaKeyName, '..\not-valid', (err) => { expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_NEW_KEY_NAME_INVALID') done() }) }) @@ -358,6 +368,7 @@ module.exports = (datastore1, datastore2) => { it('does not overwrite existing key', (done) => { ks.renameKey(rsaKeyName, rsaKeyName, (err) => { expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_KEY_ALREADY_EXISTS') done() }) }) @@ -365,6 +376,7 @@ module.exports = (datastore1, datastore2) => { it('cannot create the "self" key', (done) => { ks.renameKey(rsaKeyName, 'self', (err) => { expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_NEW_KEY_NAME_INVALID') done() }) }) @@ -406,6 +418,7 @@ module.exports = (datastore1, datastore2) => { it('cannot remove the "self" key', (done) => { ks.removeKey('self', (err) => { expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') done() }) }) @@ -413,6 +426,7 @@ module.exports = (datastore1, datastore2) => { it('cannot remove an unknown key', (done) => { ks.removeKey('not-there', (err) => { expect(err).to.exist() + 
expect(err).to.have.property('code', 'ERR_KEY_NOT_FOUND') done() }) }) From 74cb4d47754d0d96c1929c125eaaa78a1f047930 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Thu, 13 Jun 2019 14:50:37 +0100 Subject: [PATCH 067/102] chore: update contributors --- package.json | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 7521e6d006..b6526b353a 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "libp2p-keychain", - "version": "0.4.1", + "version": "0.4.2", "description": "Key management and cryptographically protected messages", "leadMaintainer": "Vasco Santos ", "main": "src/index.js", @@ -67,13 +67,14 @@ "contributors": [ "Alan Shaw ", "Alberto Elias ", + "Alex Potsides ", "David Dias ", "Hugo Dias ", "Maciej Krüger ", "Masahiro Saito ", "Richard Schneider ", - "Vasco Santos ", "Vasco Santos ", + "Vasco Santos ", "Victor Bjelkholm " ] } From 717112bdf84db2df647be7993e3d6cdea3428d66 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Thu, 13 Jun 2019 14:50:37 +0100 Subject: [PATCH 068/102] chore: release version v0.4.2 --- CHANGELOG.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7df7adbdf5..4b40f5b154 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,13 @@ + +## [0.4.2](https://github.com/libp2p/js-libp2p-keychain/compare/v0.4.1...v0.4.2) (2019-06-13) + + +### Bug Fixes + +* throw errors with correct stack trace ([#35](https://github.com/libp2p/js-libp2p-keychain/issues/35)) ([7051b9c](https://github.com/libp2p/js-libp2p-keychain/commit/7051b9c)) + + + ## [0.4.1](https://github.com/libp2p/js-libp2p-keychain/compare/v0.4.0...v0.4.1) (2019-03-14) From dda315a9c8972a42b5bd7ef72afbb1a8f8f44f10 Mon Sep 17 00:00:00 2001 From: Jacob Heun Date: Fri, 16 Aug 2019 13:12:47 +0200 Subject: [PATCH 069/102] refactor: use async/await instead of callbacks (#37) BREAKING CHANGE: The api now uses async/await instead of callbacks. 
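
For readers tracking this breaking change, here is a minimal sketch of the calling convention before and after the refactor. It assumes a `Keychain` instance `ks` constructed with a datastore and a `passPhrase` as the README shows, and uses a hypothetical key name; the method signatures (`createKey(name, type, size)`, `exportKey(name, password)`) follow the diff below.

```js
// Before (callback style): results arrive via (err, result) callbacks.
ks.createKey('my-key', 'rsa', 2048, (err, keyInfo) => {
  if (err) throw err
  ks.exportKey('my-key', 'password', (err, pem) => {
    if (err) throw err
    console.log(keyInfo.id, pem)
  })
})

// After this refactor (async/await): the same calls return promises.
async function example () {
  const keyInfo = await ks.createKey('my-key', 'rsa', 2048) // 'my-key' is illustrative
  const pem = await ks.exportKey('my-key', 'password')
  console.log(keyInfo.id, pem)
}
```
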
Co-Authored-By: Vasco Santos --- .gitignore | 1 + .travis.yml | 8 +- README.md | 40 ++-- package.json | 34 ++- src/cms.js | 105 ++++----- src/keychain.js | 375 +++++++++++++++---------------- src/util.js | 24 +- test/browser.js | 25 +-- test/cms-interop.js | 25 +-- test/keychain.spec.js | 500 ++++++++++++++++++------------------------ test/node.js | 24 +- test/peerid.js | 72 ++---- 12 files changed, 537 insertions(+), 696 deletions(-) diff --git a/.gitignore b/.gitignore index b64f085266..3da57e818c 100644 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,7 @@ logs *.log coverage +.nyc_output # Runtime data pids diff --git a/.travis.yml b/.travis.yml index 37005a25df..2061bd3272 100644 --- a/.travis.yml +++ b/.travis.yml @@ -7,6 +7,7 @@ stages: node_js: - '10' + - '12' os: - linux @@ -20,8 +21,7 @@ jobs: include: - stage: check script: - - npx aegir commitlint --travis - - npx aegir dep-check -- -i wrtc -i electron-webrtc + - npx aegir dep-check - npm run lint - stage: test @@ -29,14 +29,14 @@ jobs: addons: chrome: stable script: - - npx aegir test -t browser + - npx aegir test -t browser -t webworker - stage: test name: firefox addons: firefox: latest script: - - npx aegir test -t browser -- --browsers FirefoxHeadless + - npx aegir test -t browser -t webworker -- --browsers FirefoxHeadless notifications: email: false diff --git a/README.md b/README.md index 4ff4abe68a..37829b48a7 100644 --- a/README.md +++ b/README.md @@ -1,17 +1,13 @@ # js-libp2p-keychain -[![](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://ipn.io) -[![](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](http://ipfs.io/) -[![](https://img.shields.io/badge/freenode-%23ipfs-blue.svg?style=flat-square)](http://webchat.freenode.net/?channels=%23ipfs) +[![](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://protocol.ai) +[![](https://img.shields.io/badge/project-libp2p-yellow.svg?style=flat-square)](http://libp2p.io/) +[![](https://img.shields.io/badge/freenode-%23libp2p-yellow.svg?style=flat-square)](http://webchat.freenode.net/?channels=%23libp2p) [![Discourse posts](https://img.shields.io/discourse/https/discuss.libp2p.io/posts.svg)](https://discuss.libp2p.io) -[![standard-readme compliant](https://img.shields.io/badge/standard--readme-OK-green.svg?style=flat-square)](https://github.com/RichardLitt/standard-readme) -[![Coverage Status](https://coveralls.io/repos/github/libp2p/js-libp2p-keychain/badge.svg?branch=master)](https://coveralls.io/github/libp2p/js-libp2p-keychain?branch=master) -[![Travis CI](https://travis-ci.org/libp2p/js-libp2p-keychain.svg?branch=master)](https://travis-ci.org/libp2p/js-libp2p-keychain) -[![Circle CI](https://circleci.com/gh/libp2p/js-libp2p-keychain.svg?style=svg)](https://circleci.com/gh/libp2p/js-libp2p-keychain) +[![](https://img.shields.io/codecov/c/github/libp2p/js-libp2p-keychain.svg?style=flat-square)](https://codecov.io/gh/libp2p/js-libp2p-keychain) +[![](https://img.shields.io/travis/libp2p/js-libp2p-keychain.svg?style=flat-square)](https://travis-ci.com/libp2p/js-libp2p-keychain) [![Dependency Status](https://david-dm.org/libp2p/js-libp2p-keychain.svg?style=flat-square)](https://david-dm.org/libp2p/js-libp2p-keychain) [![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat-square)](https://github.com/feross/standard) -![](https://img.shields.io/badge/npm-%3E%3D3.0.0-orange.svg?style=flat-square) 
-![](https://img.shields.io/badge/Node.js-%3E%3D6.0.0-orange.svg?style=flat-square) > A secure key chain for libp2p in JavaScript @@ -55,23 +51,23 @@ const keychain = new Keychain(datastore, opts) Managing a key -- `createKey (name, type, size, callback)` -- `renameKey (oldName, newName, callback)` -- `removeKey (name, callback)` -- `exportKey (name, password, callback)` -- `importKey (name, pem, password, callback)` -- `importPeer (name, peer, callback)` +- `async createKey (name, type, size)` +- `async renameKey (oldName, newName)` +- `async removeKey (name)` +- `async exportKey (name, password)` +- `async importKey (name, pem, password)` +- `async importPeer (name, peer)` A naming service for a key -- `listKeys (callback)` -- `findKeyById (id, callback)` -- `findKeyByName (name, callback)` +- `async listKeys ()` +- `async findKeyById (id)` +- `async findKeyByName (name)` Cryptographically protected messages -- `cms.encrypt (name, plain, callback)` -- `cms.decrypt (cmsData, callback)` +- `async cms.encrypt (name, plain)` +- `async cms.decrypt (cmsData)` ### KeyInfo @@ -116,11 +112,11 @@ CMS, aka [PKCS #7](https://en.wikipedia.org/wiki/PKCS) and [RFC 5652](https://to ## Contribute -Feel free to join in. All welcome. Open an [issue](https://github.com/libp2p/js-libp2p-crypto/issues)! +Feel free to join in. All welcome. Open an [issue](https://github.com/libp2p/js-libp2p-keychain/issues)! This repository falls under the IPFS [Code of Conduct](https://github.com/ipfs/community/blob/master/code-of-conduct.md). -[![](https://cdn.rawgit.com/jbenet/contribute-ipfs-gif/master/img/contribute.gif)](https://github.com/ipfs/community/blob/master/contributing.md) +[![](https://cdn.rawgit.com/jbenet/contribute-ipfs-gif/master/img/contribute.gif)](https://github.com/ipfs/community/blob/master/CONTRIBUTING.md) ## License diff --git a/package.json b/package.json index b6526b353a..eb3d3085c1 100644 --- a/package.json +++ b/package.json @@ -7,21 +7,19 @@ "scripts": { "lint": "aegir lint", "build": "aegir build", + "coverage": "nyc --reporter=text --reporter=lcov npm run test:node", "test": "aegir test -t node -t browser", "test:node": "aegir test -t node", "test:browser": "aegir test -t browser", "release": "aegir release", "release-minor": "aegir release --type minor", - "release-major": "aegir release --type major", - "coverage": "aegir coverage", - "coverage-publish": "aegir coverage publish" + "release-major": "aegir release --type major" }, "pre-push": [ - "lint", - "test" + "lint" ], "engines": { - "node": ">=6.0.0", + "node": ">=10.0.0", "npm": ">=3.0.0" }, "repository": { @@ -42,26 +40,24 @@ }, "homepage": "https://github.com/libp2p/js-libp2p-keychain#readme", "dependencies": { - "async": "^2.6.2", - "err-code": "^1.1.2", - "interface-datastore": "~0.6.0", - "libp2p-crypto": "~0.16.1", + "err-code": "^2.0.0", + "interface-datastore": "^0.7.0", + "libp2p-crypto": "^0.17.0", "merge-options": "^1.0.1", - "node-forge": "~0.7.6", - "pull-stream": "^3.6.9", + "node-forge": "^0.8.5", "sanitize-filename": "^1.6.1" }, "devDependencies": { - "aegir": "^18.2.1", + "aegir": "^20.0.0", "chai": "^4.2.0", "chai-string": "^1.5.0", - "datastore-fs": "~0.8.0", - "datastore-level": "~0.10.0", + "datastore-fs": "^0.9.0", + "datastore-level": "^0.12.1", "dirty-chai": "^2.0.1", - "level-js": "^4.0.1", - "mocha": "^5.2.0", - "multihashes": "~0.4.14", - "peer-id": "~0.12.2", + "level": "^5.0.1", + "multihashes": "^0.4.15", + "peer-id": "^0.13.2", + "promisify-es6": "^1.0.3", "rimraf": "^2.6.3" }, "contributors": [ diff 
--git a/src/cms.js b/src/cms.js index d086407ef8..9bec4b9451 100644 --- a/src/cms.js +++ b/src/cms.js @@ -1,13 +1,9 @@ 'use strict' -const setImmediate = require('async/setImmediate') -const series = require('async/series') -const detect = require('async/detect') -const waterfall = require('async/waterfall') require('node-forge/lib/pkcs7') require('node-forge/lib/pbe') const forge = require('node-forge/lib/forge') -const util = require('./util') +const { certificateForKey, findAsync } = require('./util') const errcode = require('err-code') /** @@ -40,44 +36,27 @@ class CMS { * * @param {string} name - The local key name. * @param {Buffer} plain - The data to encrypt. - * @param {function(Error, Buffer)} callback * @returns {undefined} */ - encrypt (name, plain, callback) { - const self = this - const done = (err, result) => setImmediate(() => callback(err, result)) - + async encrypt (name, plain) { if (!Buffer.isBuffer(plain)) { - return done(errcode(new Error('Plain data must be a Buffer'), 'ERR_INVALID_PARAMS')) + throw errcode(new Error('Plain data must be a Buffer'), 'ERR_INVALID_PARAMS') } - series([ - (cb) => self.keychain.findKeyByName(name, cb), - (cb) => self.keychain._getPrivateKey(name, cb) - ], (err, results) => { - if (err) return done(err) - - let key = results[0] - let pem = results[1] - try { - const privateKey = forge.pki.decryptRsaPrivateKey(pem, self.keychain._()) - util.certificateForKey(key, privateKey, (err, certificate) => { - if (err) return callback(err) + const key = await this.keychain.findKeyByName(name) + const pem = await this.keychain._getPrivateKey(name) + const privateKey = forge.pki.decryptRsaPrivateKey(pem, this.keychain._()) + const certificate = await certificateForKey(key, privateKey) - // create a p7 enveloped message - const p7 = forge.pkcs7.createEnvelopedData() - p7.addRecipient(certificate) - p7.content = forge.util.createBuffer(plain) - p7.encrypt() + // create a p7 enveloped message + const p7 = forge.pkcs7.createEnvelopedData() + p7.addRecipient(certificate) + p7.content = forge.util.createBuffer(plain) + p7.encrypt() - // convert message to DER - const der = forge.asn1.toDer(p7.toAsn1()).getBytes() - done(null, Buffer.from(der, 'binary')) - }) - } catch (err) { - done(err) - } - }) + // convert message to DER + const der = forge.asn1.toDer(p7.toAsn1()).getBytes() + return Buffer.from(der, 'binary') } /** @@ -87,24 +66,20 @@ class CMS { * exists, an Error is returned with the property 'missingKeys'. It is array of key ids. * * @param {Buffer} cmsData - The CMS encrypted data to decrypt. - * @param {function(Error, Buffer)} callback * @returns {undefined} */ - decrypt (cmsData, callback) { - const done = (err, result) => setImmediate(() => callback(err, result)) - + async decrypt (cmsData) { if (!Buffer.isBuffer(cmsData)) { - return done(errcode(new Error('CMS data is required'), 'ERR_INVALID_PARAMS')) + throw errcode(new Error('CMS data is required'), 'ERR_INVALID_PARAMS') } - const self = this let cms try { const buf = forge.util.createBuffer(cmsData.toString('binary')) const obj = forge.asn1.fromDer(buf) cms = forge.pkcs7.messageFromAsn1(obj) } catch (err) { - return done(errcode(new Error('Invalid CMS: ' + err.message), 'ERR_INVALID_CMS')) + throw errcode(new Error('Invalid CMS: ' + err.message), 'ERR_INVALID_CMS') } // Find a recipient whose key we hold. 
We only deal with recipient certs @@ -118,31 +93,29 @@ class CMS { keyId: r.issuer.find(a => a.shortName === 'CN').value } }) - detect( - recipients, - (r, cb) => self.keychain.findKeyById(r.keyId, (err, info) => cb(null, !err && info)), - (err, r) => { - if (err) return done(err) - if (!r) { - const missingKeys = recipients.map(r => r.keyId) - err = errcode(new Error('Decryption needs one of the key(s): ' + missingKeys.join(', ')), 'ERR_MISSING_KEYS', { - missingKeys - }) - return done(err) - } - waterfall([ - (cb) => self.keychain.findKeyById(r.keyId, cb), - (key, cb) => self.keychain._getPrivateKey(key.name, cb) - ], (err, pem) => { - if (err) return done(err) - - const privateKey = forge.pki.decryptRsaPrivateKey(pem, self.keychain._()) - cms.decrypt(r.recipient, privateKey) - done(null, Buffer.from(cms.content.getBytes(), 'binary')) - }) + const r = await findAsync(recipients, async (recipient) => { + try { + const key = await this.keychain.findKeyById(recipient.keyId) + if (key) return true + } catch (err) { + return false } - ) + return false + }) + + if (!r) { + const missingKeys = recipients.map(r => r.keyId) + throw errcode(new Error('Decryption needs one of the key(s): ' + missingKeys.join(', ')), 'ERR_MISSING_KEYS', { + missingKeys + }) + } + + const key = await this.keychain.findKeyById(r.keyId) + const pem = await this.keychain._getPrivateKey(key.name) + const privateKey = forge.pki.decryptRsaPrivateKey(pem, this.keychain._()) + cms.decrypt(r.recipient, privateKey) + return Buffer.from(cms.content.getBytes(), 'binary') } } diff --git a/src/keychain.js b/src/keychain.js index f8f8889556..2f67345cda 100644 --- a/src/keychain.js +++ b/src/keychain.js @@ -5,8 +5,6 @@ const sanitize = require('sanitize-filename') const mergeOptions = require('merge-options') const crypto = require('libp2p-crypto') const DS = require('interface-datastore') -const collect = require('pull-stream/sinks/collect') -const pull = require('pull-stream/pull') const CMS = require('./cms') const errcode = require('err-code') @@ -37,22 +35,21 @@ function validateKeyName (name) { } /** - * Returns an error to the caller, after a delay + * Throws an error after a delay * * This assumes than an error indicates that the keychain is under attack. Delay returning an * error to make brute force attacks harder. * - * @param {function(Error)} callback - The caller * @param {string | Error} err - The error - * @returns {undefined} * @private */ -function _error (callback, err) { +async function throwDelayed (err) { const min = 200 const max = 1000 const delay = Math.random() * (max - min) + min - setTimeout(callback, delay, err, null) + await new Promise(resolve => setTimeout(resolve, delay)) + throw err } /** @@ -175,146 +172,131 @@ class Keychain { * @param {string} name - The local key name; cannot already exist. * @param {string} type - One of the key types; 'rsa'. * @param {int} size - The key size in bits. 
- * @param {function(Error, KeyInfo)} callback - * @returns {undefined} + * @returns {KeyInfo} */ - createKey (name, type, size, callback) { + async createKey (name, type, size) { const self = this if (!validateKeyName(name) || name === 'self') { - return _error(callback, errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME')) + return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME')) } if (typeof type !== 'string') { - return _error(callback, errcode(new Error(`Invalid key type '${type}'`), 'ERR_INVALID_KEY_TYPE')) + return throwDelayed(errcode(new Error(`Invalid key type '${type}'`), 'ERR_INVALID_KEY_TYPE')) } if (!Number.isSafeInteger(size)) { - return _error(callback, errcode(new Error(`Invalid key size '${size}'`), 'ERR_INVALID_KEY_SIZE')) + return throwDelayed(errcode(new Error(`Invalid key size '${size}'`), 'ERR_INVALID_KEY_SIZE')) } const dsname = DsName(name) - self.store.has(dsname, (err, exists) => { - if (err) return _error(callback, err) - if (exists) return _error(callback, errcode(new Error(`Key '${name}' already exists`), 'ERR_KEY_ALREADY_EXISTS')) - - switch (type.toLowerCase()) { - case 'rsa': - if (size < 2048) { - return _error(callback, errcode(new Error(`Invalid RSA key size ${size}`), 'ERR_INVALID_KEY_SIZE')) - } - break - default: - break + const exists = await self.store.has(dsname) + if (exists) return throwDelayed(errcode(new Error(`Key '${name}' already exists`), 'ERR_KEY_ALREADY_EXISTS')) + + switch (type.toLowerCase()) { + case 'rsa': + if (size < 2048) { + return throwDelayed(errcode(new Error(`Invalid RSA key size ${size}`), 'ERR_INVALID_KEY_SIZE')) + } + break + default: + break + } + + let keyInfo + try { + const keypair = await crypto.keys.generateKeyPair(type, size) + + const kid = await keypair.id() + const pem = await keypair.export(this._()) + keyInfo = { + name: name, + id: kid } + const batch = self.store.batch() + batch.put(dsname, pem) + batch.put(DsInfoName(name), JSON.stringify(keyInfo)) - crypto.keys.generateKeyPair(type, size, (err, keypair) => { - if (err) return _error(callback, err) - keypair.id((err, kid) => { - if (err) return _error(callback, err) - keypair.export(this._(), (err, pem) => { - if (err) return _error(callback, err) - const keyInfo = { - name: name, - id: kid - } - const batch = self.store.batch() - batch.put(dsname, pem) - batch.put(DsInfoName(name), JSON.stringify(keyInfo)) - batch.commit((err) => { - if (err) return _error(callback, err) - - callback(null, keyInfo) - }) - }) - }) - }) - }) + await batch.commit() + } catch (err) { + return throwDelayed(err) + } + + return keyInfo } /** * List all the keys. * - * @param {function(Error, KeyInfo[])} callback - * @returns {undefined} + * @returns {KeyInfo[]} */ - listKeys (callback) { + async listKeys () { const self = this const query = { prefix: infoPrefix } - pull( - self.store.query(query), - collect((err, res) => { - if (err) return _error(callback, err) - const info = res.map(r => JSON.parse(r.value)) - callback(null, info) - }) - ) + const info = [] + for await (const value of self.store.query(query)) { + info.push(JSON.parse(value.value)) + } + + return info } /** * Find a key by it's id. * * @param {string} id - The universally unique key identifier. 
- * @param {function(Error, KeyInfo)} callback - * @returns {undefined} + * @returns {KeyInfo} */ - findKeyById (id, callback) { - this.listKeys((err, keys) => { - if (err) return _error(callback, err) - - const key = keys.find((k) => k.id === id) - callback(null, key) - }) + async findKeyById (id) { + try { + const keys = await this.listKeys() + return keys.find((k) => k.id === id) + } catch (err) { + return throwDelayed(err) + } } /** * Find a key by it's name. * * @param {string} name - The local key name. - * @param {function(Error, KeyInfo)} callback - * @returns {undefined} + * @returns {KeyInfo} */ - findKeyByName (name, callback) { + async findKeyByName (name) { if (!validateKeyName(name)) { - return _error(callback, errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME')) + return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME')) } const dsname = DsInfoName(name) - this.store.get(dsname, (err, res) => { - if (err) { - return _error(callback, errcode(new Error(`Key '${name}' does not exist. ${err.message}`), 'ERR_KEY_NOT_FOUND')) - } - - callback(null, JSON.parse(res.toString())) - }) + try { + const res = await this.store.get(dsname) + return JSON.parse(res.toString()) + } catch (err) { + return throwDelayed(errcode(new Error(`Key '${name}' does not exist. ${err.message}`), 'ERR_KEY_NOT_FOUND')) + } } /** * Remove an existing key. * * @param {string} name - The local key name; must already exist. - * @param {function(Error, KeyInfo)} callback - * @returns {undefined} + * @returns {KeyInfo} */ - removeKey (name, callback) { + async removeKey (name) { const self = this if (!validateKeyName(name) || name === 'self') { - return _error(callback, errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME')) + return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME')) } const dsname = DsName(name) - self.findKeyByName(name, (err, keyinfo) => { - if (err) return _error(callback, err) - const batch = self.store.batch() - batch.delete(dsname) - batch.delete(DsInfoName(name)) - batch.commit((err) => { - if (err) return _error(callback, err) - callback(null, keyinfo) - }) - }) + const keyInfo = await self.findKeyByName(name) + const batch = self.store.batch() + batch.delete(dsname) + batch.delete(DsInfoName(name)) + await batch.commit() + return keyInfo } /** @@ -322,47 +304,41 @@ class Keychain { * * @param {string} oldName - The old local key name; must already exist. * @param {string} newName - The new local key name; must not already exist. 
- * @param {function(Error, KeyInfo)} callback - * @returns {undefined} + * @returns {KeyInfo} */ - renameKey (oldName, newName, callback) { + async renameKey (oldName, newName) { const self = this if (!validateKeyName(oldName) || oldName === 'self') { - return _error(callback, errcode(new Error(`Invalid old key name '${oldName}'`), 'ERR_OLD_KEY_NAME_INVALID')) + return throwDelayed(errcode(new Error(`Invalid old key name '${oldName}'`), 'ERR_OLD_KEY_NAME_INVALID')) } if (!validateKeyName(newName) || newName === 'self') { - return _error(callback, errcode(new Error(`Invalid new key name '${newName}'`), 'ERR_NEW_KEY_NAME_INVALID')) + return throwDelayed(errcode(new Error(`Invalid new key name '${newName}'`), 'ERR_NEW_KEY_NAME_INVALID')) } const oldDsname = DsName(oldName) const newDsname = DsName(newName) const oldInfoName = DsInfoName(oldName) const newInfoName = DsInfoName(newName) - this.store.get(oldDsname, (err, res) => { - if (err) { - return _error(callback, errcode(new Error(`Key '${oldName}' does not exist. ${err.message}`), 'ERR_KEY_NOT_FOUND')) - } + + const exists = await self.store.has(newDsname) + if (exists) return throwDelayed(errcode(new Error(`Key '${newName}' already exists`), 'ERR_KEY_ALREADY_EXISTS')) + + try { + let res = await this.store.get(oldDsname) const pem = res.toString() - self.store.has(newDsname, (err, exists) => { - if (err) return _error(callback, err) - if (exists) return _error(callback, errcode(new Error(`Key '${newName}' already exists`), 'ERR_KEY_ALREADY_EXISTS')) - - self.store.get(oldInfoName, (err, res) => { - if (err) return _error(callback, err) - - const keyInfo = JSON.parse(res.toString()) - keyInfo.name = newName - const batch = self.store.batch() - batch.put(newDsname, pem) - batch.put(newInfoName, JSON.stringify(keyInfo)) - batch.delete(oldDsname) - batch.delete(oldInfoName) - batch.commit((err) => { - if (err) return _error(callback, err) - callback(null, keyInfo) - }) - }) - }) - }) + res = await self.store.get(oldInfoName) + + const keyInfo = JSON.parse(res.toString()) + keyInfo.name = newName + const batch = self.store.batch() + batch.put(newDsname, pem) + batch.put(newInfoName, JSON.stringify(keyInfo)) + batch.delete(oldDsname) + batch.delete(oldInfoName) + await batch.commit() + return keyInfo + } catch (err) { + return throwDelayed(err) + } } /** @@ -370,28 +346,25 @@ class Keychain { * * @param {string} name - The local key name; must already exist. * @param {string} password - The password - * @param {function(Error, string)} callback - * @returns {undefined} + * @returns {string} */ - exportKey (name, password, callback) { + async exportKey (name, password) { if (!validateKeyName(name)) { - return _error(callback, errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME')) + return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME')) } if (!password) { - return _error(callback, errcode(new Error('Password is required'), 'ERR_PASSWORD_REQUIRED')) + return throwDelayed(errcode(new Error('Password is required'), 'ERR_PASSWORD_REQUIRED')) } const dsname = DsName(name) - this.store.get(dsname, (err, res) => { - if (err) { - return _error(callback, errcode(new Error(`Key '${name}' does not exist. 
${err.message}`), 'ERR_KEY_NOT_FOUND')) - } + try { + const res = await this.store.get(dsname) const pem = res.toString() - crypto.keys.import(pem, this._(), (err, privateKey) => { - if (err) return _error(callback, err) - privateKey.export(password, callback) - }) - }) + const privateKey = await crypto.keys.import(pem, this._()) + return privateKey.export(password) + } catch (err) { + return throwDelayed(err) + } } /** @@ -400,99 +373,97 @@ class Keychain { * @param {string} name - The local key name; must not already exist. * @param {string} pem - The PEM encoded PKCS #8 string * @param {string} password - The password. - * @param {function(Error, KeyInfo)} callback - * @returns {undefined} + * @returns {KeyInfo} */ - importKey (name, pem, password, callback) { + async importKey (name, pem, password) { const self = this if (!validateKeyName(name) || name === 'self') { - return _error(callback, errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME')) + return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME')) } if (!pem) { - return _error(callback, 'PEM encoded key is required') + return throwDelayed(errcode(new Error('PEM encoded key is required'), 'ERR_PEM_REQUIRED')) } const dsname = DsName(name) - self.store.has(dsname, (err, exists) => { - if (err) return _error(callback, err) - if (exists) return _error(callback, errcode(new Error(`Key '${name}' already exists`), 'ERR_KEY_ALREADY_EXISTS')) - crypto.keys.import(pem, password, (err, privateKey) => { - if (err) return _error(callback, errcode(new Error('Cannot read the key, most likely the password is wrong'), 'ERR_CANNOT_READ_KEY')) - privateKey.id((err, kid) => { - if (err) return _error(callback, err) - privateKey.export(this._(), (err, pem) => { - if (err) return _error(callback, err) - const keyInfo = { - name: name, - id: kid - } - const batch = self.store.batch() - batch.put(dsname, pem) - batch.put(DsInfoName(name), JSON.stringify(keyInfo)) - batch.commit((err) => { - if (err) return _error(callback, err) - - callback(null, keyInfo) - }) - }) - }) - }) - }) + const exists = await self.store.has(dsname) + if (exists) return throwDelayed(errcode(new Error(`Key '${name}' already exists`), 'ERR_KEY_ALREADY_EXISTS')) + + let privateKey + try { + privateKey = await crypto.keys.import(pem, password) + } catch (err) { + return throwDelayed(errcode(new Error('Cannot read the key, most likely the password is wrong'), 'ERR_CANNOT_READ_KEY')) + } + + let kid + try { + kid = await privateKey.id() + pem = await privateKey.export(this._()) + } catch (err) { + return throwDelayed(err) + } + + const keyInfo = { + name: name, + id: kid + } + const batch = self.store.batch() + batch.put(dsname, pem) + batch.put(DsInfoName(name), JSON.stringify(keyInfo)) + await batch.commit() + + return keyInfo } - importPeer (name, peer, callback) { + async importPeer (name, peer) { const self = this if (!validateKeyName(name)) { - return _error(callback, errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME')) + return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME')) } if (!peer || !peer.privKey) { - return _error(callback, errcode(new Error('Peer.privKey is required'), 'ERR_MISSING_PRIVATE_KEY')) + return throwDelayed(errcode(new Error('Peer.privKey is required'), 'ERR_MISSING_PRIVATE_KEY')) } const privateKey = peer.privKey const dsname = DsName(name) - self.store.has(dsname, (err, exists) => { - if (err) return _error(callback, err) - if (exists) 
return _error(callback, errcode(new Error(`Key '${name}' already exists`), 'ERR_KEY_ALREADY_EXISTS')) - - privateKey.id((err, kid) => { - if (err) return _error(callback, err) - privateKey.export(this._(), (err, pem) => { - if (err) return _error(callback, err) - const keyInfo = { - name: name, - id: kid - } - const batch = self.store.batch() - batch.put(dsname, pem) - batch.put(DsInfoName(name), JSON.stringify(keyInfo)) - batch.commit((err) => { - if (err) return _error(callback, err) - - callback(null, keyInfo) - }) - }) - }) - }) + const exists = await self.store.has(dsname) + if (exists) return throwDelayed(errcode(new Error(`Key '${name}' already exists`), 'ERR_KEY_ALREADY_EXISTS')) + + try { + const kid = await privateKey.id() + const pem = await privateKey.export(this._()) + const keyInfo = { + name: name, + id: kid + } + const batch = self.store.batch() + batch.put(dsname, pem) + batch.put(DsInfoName(name), JSON.stringify(keyInfo)) + await batch.commit() + return keyInfo + } catch (err) { + return throwDelayed(err) + } } /** * Gets the private key as PEM encoded PKCS #8 string. * * @param {string} name - * @param {function(Error, string)} callback - * @returns {undefined} + * @returns {string} * @private */ - _getPrivateKey (name, callback) { + async _getPrivateKey (name) { if (!validateKeyName(name)) { - return _error(callback, errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME')) + return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME')) + } + + try { + const dsname = DsName(name) + const res = await this.store.get(dsname) + return res.toString() + } catch (err) { + return throwDelayed(errcode(new Error(`Key '${name}' does not exist. ${err.message}`), 'ERR_KEY_NOT_FOUND')) } - this.store.get(DsName(name), (err, res) => { - if (err) { - return _error(callback, errcode(new Error(`Key '${name}' does not exist. ${err.message}`), 'ERR_KEY_NOT_FOUND')) - } - callback(null, res.toString()) - }) } } diff --git a/src/util.js b/src/util.js index bc61c5b7fc..50ce4174c2 100644 --- a/src/util.js +++ b/src/util.js @@ -14,10 +14,9 @@ exports = module.exports * * @param {KeyInfo} key - The id and name of the key * @param {RsaPrivateKey} privateKey - The naked key - * @param {function(Error, Certificate)} callback * @returns {undefined} */ -exports.certificateForKey = (key, privateKey, callback) => { +exports.certificateForKey = (key, privateKey) => { const publicKey = pki.setRsaPublicKey(privateKey.n, privateKey.e) const cert = pki.createCertificate() cert.publicKey = publicKey @@ -67,5 +66,24 @@ exports.certificateForKey = (key, privateKey, callback) => { // self-sign certificate cert.sign(privateKey) - return callback(null, cert) + return cert } + +/** + * Finds the first item in a collection that is matched in the + * `asyncCompare` function. + * + * `asyncCompare` is an async function that must + * resolve to either `true` or `false`. 
+ * + * @param {Array} array + * @param {function(*)} asyncCompare An async function that returns a boolean + */ +async function findAsync (array, asyncCompare) { + const promises = array.map(asyncCompare) + const results = await Promise.all(promises) + const index = results.findIndex(result => result) + return array[index] +} + +module.exports.findAsync = findAsync diff --git a/test/browser.js b/test/browser.js index 0a37bedd94..02222fb3b3 100644 --- a/test/browser.js +++ b/test/browser.js @@ -1,25 +1,24 @@ /* eslint-env mocha */ 'use strict' -const series = require('async/series') const LevelStore = require('datastore-level') describe('browser', () => { - const datastore1 = new LevelStore('test-keystore-1', { db: require('level-js') }) - const datastore2 = new LevelStore('test-keystore-2', { db: require('level-js') }) + const datastore1 = new LevelStore('test-keystore-1', { db: require('level') }) + const datastore2 = new LevelStore('test-keystore-2', { db: require('level') }) - before((done) => { - series([ - (cb) => datastore1.open(cb), - (cb) => datastore2.open(cb) - ], done) + before(() => { + return Promise.all([ + datastore1.open(), + datastore2.open() + ]) }) - after((done) => { - series([ - (cb) => datastore1.close(cb), - (cb) => datastore2.close(cb) - ], done) + after(() => { + return Promise.all([ + datastore1.close(), + datastore2.close() + ]) }) require('./keychain.spec')(datastore1, datastore2) diff --git a/test/cms-interop.js b/test/cms-interop.js index a7449984fd..06eb63123b 100644 --- a/test/cms-interop.js +++ b/test/cms-interop.js @@ -15,14 +15,13 @@ module.exports = (datastore) => { const aliceKeyName = 'cms-interop-alice' let ks - before((done) => { + before(() => { ks = new Keychain(datastore, { passPhrase: passPhrase }) - done() }) const plainData = Buffer.from('This is a message from Alice to Bob') - it('imports openssl key', function (done) { + it('imports openssl key', async function () { this.timeout(10 * 1000) const aliceKid = 'QmNzBqPwp42HZJccsLtc4ok6LjZAspckgs2du5tTmjPfFA' const alice = `-----BEGIN ENCRYPTED PRIVATE KEY----- @@ -43,15 +42,12 @@ igg5jozKCW82JsuWSiW9tu0F/6DuvYiZwHS3OLiJP0CuLfbOaRw8Jia1RTvXEH7m cn4oisOvxCprs4aM9UVjtZTCjfyNpX8UWwT1W3rySV+KQNhxuMy3RzmL -----END ENCRYPTED PRIVATE KEY----- ` - ks.importKey(aliceKeyName, alice, 'mypassword', (err, key) => { - expect(err).to.not.exist() - expect(key.name).to.equal(aliceKeyName) - expect(key.id).to.equal(aliceKid) - done() - }) + const key = await ks.importKey(aliceKeyName, alice, 'mypassword') + expect(key.name).to.equal(aliceKeyName) + expect(key.id).to.equal(aliceKid) }) - it('decrypts node-forge example', (done) => { + it('decrypts node-forge example', async () => { const example = ` MIIBcwYJKoZIhvcNAQcDoIIBZDCCAWACAQAxgfowgfcCAQAwYDBbMQ0wCwYDVQQK EwRpcGZzMREwDwYDVQQLEwhrZXlzdG9yZTE3MDUGA1UEAxMuUW1OekJxUHdwNDJI @@ -62,12 +58,9 @@ knU1yykWGkdlbclCuu0NaAfmb8o0OX50CbEKZB7xmsv8tnqn0H0jMF4GCSqGSIb3 DQEHATAdBglghkgBZQMEASoEEP/PW1JWehQx6/dsLkp/Mf+gMgQwFM9liLTqC56B nHILFmhac/+a/StQOKuf9dx5qXeGvt9LnwKuGGSfNX4g+dTkoa6N ` - ks.cms.decrypt(Buffer.from(example, 'base64'), (err, plain) => { - expect(err).to.not.exist() - expect(plain).to.exist() - expect(plain.toString()).to.equal(plainData.toString()) - done() - }) + const plain = await ks.cms.decrypt(Buffer.from(example, 'base64')) + expect(plain).to.exist() + expect(plain.toString()).to.equal(plainData.toString()) }) }) } diff --git a/test/keychain.spec.js b/test/keychain.spec.js index bcaa6671d0..c455f2d764 100644 --- a/test/keychain.spec.js +++ 
b/test/keychain.spec.js @@ -3,11 +3,11 @@ 'use strict' const chai = require('chai') -const dirtyChai = require('dirty-chai') const expect = chai.expect -chai.use(dirtyChai) +const fail = expect.fail +chai.use(require('dirty-chai')) chai.use(require('chai-string')) -const Keychain = require('..') +const Keychain = require('../') const PeerId = require('peer-id') module.exports = (datastore1, datastore2) => { @@ -55,148 +55,111 @@ module.exports = (datastore1, datastore2) => { }) describe('key name', () => { - it('is a valid filename and non-ASCII', () => { - ks.removeKey('../../nasty', (err) => { - expect(err).to.exist() - expect(err).to.have.property('message', 'Invalid key name \'../../nasty\'') - expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') - }) - ks.removeKey('', (err) => { - expect(err).to.exist() - expect(err).to.have.property('message', 'Invalid key name \'\'') - expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') - }) - ks.removeKey(' ', (err) => { - expect(err).to.exist() - expect(err).to.have.property('message', 'Invalid key name \' \'') - expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') - }) - ks.removeKey(null, (err) => { - expect(err).to.exist() - expect(err).to.have.property('message', 'Invalid key name \'null\'') - expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') - }) - ks.removeKey(undefined, (err) => { - expect(err).to.exist() - expect(err).to.have.property('message', 'Invalid key name \'undefined\'') - expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') + it('is a valid filename and non-ASCII', async () => { + const errors = await Promise.all([ + ks.removeKey('../../nasty').then(fail, err => err), + ks.removeKey('').then(fail, err => err), + ks.removeKey(' ').then(fail, err => err), + ks.removeKey(null).then(fail, err => err), + ks.removeKey(undefined).then(fail, err => err) + ]) + + expect(errors).to.have.length(5) + errors.forEach(error => { + expect(error).to.have.property('code', 'ERR_INVALID_KEY_NAME') }) }) }) describe('key', () => { - it('can be an RSA key', function (done) { - this.timeout(50 * 1000) - ks.createKey(rsaKeyName, 'rsa', 2048, (err, info) => { - expect(err).to.not.exist() - expect(info).exist() - rsaKeyInfo = info - done() - }) - }) - - it('has a name and id', () => { + it('can be an RSA key', async () => { + rsaKeyInfo = await ks.createKey(rsaKeyName, 'rsa', 2048) + expect(rsaKeyInfo).to.exist() expect(rsaKeyInfo).to.have.property('name', rsaKeyName) expect(rsaKeyInfo).to.have.property('id') }) - it('is encrypted PEM encoded PKCS #8', (done) => { - ks._getPrivateKey(rsaKeyName, (err, pem) => { - expect(err).to.not.exist() - expect(pem).to.startsWith('-----BEGIN ENCRYPTED PRIVATE KEY-----') - done() - }) + it('is encrypted PEM encoded PKCS #8', async () => { + const pem = await ks._getPrivateKey(rsaKeyName) + return expect(pem).to.startsWith('-----BEGIN ENCRYPTED PRIVATE KEY-----') }) - it('does not overwrite existing key', (done) => { - ks.createKey(rsaKeyName, 'rsa', 2048, (err) => { - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_KEY_ALREADY_EXISTS') - done() - }) + it('throws if an invalid private key name is given', async () => { + const err = await ks._getPrivateKey(undefined).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') }) - it('cannot create the "self" key', (done) => { - ks.createKey('self', 'rsa', 2048, (err) => { - expect(err).to.exist() - done() - }) + it('throws if a private key cant be found', 
async () => { + const err = await ks._getPrivateKey('not real').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_KEY_NOT_FOUND') }) - it('should validate name is string', (done) => { - ks.createKey(5, 'rsa', 2048, (err) => { - expect(err).to.exist() - expect(err.message).to.contain('Invalid key name') - done() - }) + it('does not overwrite existing key', async () => { + const err = await ks.createKey(rsaKeyName, 'rsa', 2048).then(fail, err => err) + expect(err).to.have.property('code', 'ERR_KEY_ALREADY_EXISTS') }) - it('should validate type is string', (done) => { - ks.createKey('TEST' + Date.now(), null, 2048, (err) => { - expect(err).to.exist() - expect(err.message).to.contain('Invalid key type') - done() - }) + it('cannot create the "self" key', async () => { + const err = await ks.createKey('self', 'rsa', 2048).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') }) - it('should validate size is integer', (done) => { - ks.createKey('TEST' + Date.now(), 'rsa', 'string', (err) => { - expect(err).to.exist() - expect(err.message).to.contain('Invalid key size') - done() - }) + it('should validate name is string', async () => { + const err = await ks.createKey(5, 'rsa', 2048).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') + }) + + it('should validate type is string', async () => { + const err = await ks.createKey('TEST' + Date.now(), null, 2048).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_KEY_TYPE') + }) + + it('should validate size is integer', async () => { + const err = await ks.createKey('TEST' + Date.now(), 'rsa', 'string').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_KEY_SIZE') }) describe('implements NIST SP 800-131A', () => { - it('disallows RSA length < 2048', (done) => { - ks.createKey('bad-nist-rsa', 'rsa', 1024, (err) => { - expect(err).to.exist() - expect(err).to.have.property('message', 'Invalid RSA key size 1024') - expect(err).to.have.property('code', 'ERR_INVALID_KEY_SIZE') - done() - }) + it('disallows RSA length < 2048', async () => { + const err = await ks.createKey('bad-nist-rsa', 'rsa', 1024).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_KEY_SIZE') }) }) }) describe('query', () => { - it('finds all existing keys', (done) => { - ks.listKeys((err, keys) => { - expect(err).to.not.exist() - expect(keys).to.exist() - const mykey = keys.find((k) => k.name.normalize() === rsaKeyName.normalize()) - expect(mykey).to.exist() - done() - }) + it('finds all existing keys', async () => { + const keys = await ks.listKeys() + expect(keys).to.exist() + const mykey = keys.find((k) => k.name.normalize() === rsaKeyName.normalize()) + expect(mykey).to.exist() }) - it('finds a key by name', (done) => { - ks.findKeyByName(rsaKeyName, (err, key) => { - expect(err).to.not.exist() - expect(key).to.exist() - expect(key).to.deep.equal(rsaKeyInfo) - done() - }) + it('finds a key by name', async () => { + const key = await ks.findKeyByName(rsaKeyName) + expect(key).to.exist() + expect(key).to.deep.equal(rsaKeyInfo) }) - it('finds a key by id', (done) => { - ks.findKeyById(rsaKeyInfo.id, (err, key) => { - expect(err).to.not.exist() - expect(key).to.exist() - expect(key).to.deep.equal(rsaKeyInfo) - done() - }) + it('finds a key by id', async () => { + const key = await 
ks.findKeyById(rsaKeyInfo.id) + expect(key).to.exist() + expect(key).to.deep.equal(rsaKeyInfo) }) - it('returns the key\'s name and id', (done) => { - ks.listKeys((err, keys) => { - expect(err).to.not.exist() - expect(keys).to.exist() - keys.forEach((key) => { - expect(key).to.have.property('name') - expect(key).to.have.property('id') - }) - done() + it('returns the key\'s name and id', async () => { + const keys = await ks.listKeys() + expect(keys).to.exist() + keys.forEach((key) => { + expect(key).to.have.property('name') + expect(key).to.have.property('id') }) }) }) @@ -205,103 +168,97 @@ module.exports = (datastore1, datastore2) => { const plainData = Buffer.from('This is a message from Alice to Bob') let cms - it('service is available', (done) => { + it('service is available', () => { expect(ks).to.have.property('cms') - done() }) - it('requires a key', (done) => { - ks.cms.encrypt('no-key', plainData, (err, msg) => { - expect(err).to.exist() - done() - }) + it('requires a key', async () => { + const err = await ks.cms.encrypt('no-key', plainData).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_KEY_NOT_FOUND') }) - it('requires plain data as a Buffer', (done) => { - ks.cms.encrypt(rsaKeyName, 'plain data', (err, msg) => { - expect(err).to.exist() - done() - }) + it('requires plain data as a Buffer', async () => { + const err = await ks.cms.encrypt(rsaKeyName, 'plain data').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_PARAMS') }) - it('encrypts', (done) => { - ks.cms.encrypt(rsaKeyName, plainData, (err, msg) => { - expect(err).to.not.exist() - expect(msg).to.exist() - expect(msg).to.be.instanceOf(Buffer) - cms = msg - done() - }) + it('encrypts', async () => { + cms = await ks.cms.encrypt(rsaKeyName, plainData) + expect(cms).to.exist() + expect(cms).to.be.instanceOf(Buffer) }) - it('is a PKCS #7 message', (done) => { - ks.cms.decrypt('not CMS', (err) => { - expect(err).to.exist() - done() - }) + it('is a PKCS #7 message', async () => { + const err = await ks.cms.decrypt('not CMS').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_PARAMS') }) - it('is a PKCS #7 binary message', (done) => { - ks.cms.decrypt(plainData, (err) => { - expect(err).to.exist() - done() - }) + it('is a PKCS #7 binary message', async () => { + const err = await ks.cms.decrypt(plainData).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_CMS') }) - it('cannot be read without the key', (done) => { - emptyKeystore.cms.decrypt(cms, (err, plain) => { - expect(err).to.exist() - expect(err).to.have.property('missingKeys') - expect(err.missingKeys).to.eql([rsaKeyInfo.id]) - expect(err).to.have.property('code', 'ERR_MISSING_KEYS') - done() - }) + it('cannot be read without the key', async () => { + const err = await emptyKeystore.cms.decrypt(cms).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('missingKeys') + expect(err.missingKeys).to.eql([rsaKeyInfo.id]) + expect(err).to.have.property('code', 'ERR_MISSING_KEYS') }) - it('can be read with the key', (done) => { - ks.cms.decrypt(cms, (err, plain) => { - expect(err).to.not.exist() - expect(plain).to.exist() - expect(plain.toString()).to.equal(plainData.toString()) - done() - }) + it('can be read with the key', async () => { + const plain = await ks.cms.decrypt(cms) + expect(plain).to.exist() + expect(plain.toString()).to.equal(plainData.toString()) 
}) }) describe('exported key', () => { let pemKey - it('is a PKCS #8 encrypted pem', (done) => { - ks.exportKey(rsaKeyName, 'password', (err, pem) => { - expect(err).to.not.exist() - expect(pem).to.startsWith('-----BEGIN ENCRYPTED PRIVATE KEY-----') - pemKey = pem - done() - }) + it('requires the password', async () => { + const err = await ks.exportKey(rsaKeyName).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_PASSWORD_REQUIRED') }) - it('can be imported', (done) => { - ks.importKey('imported-key', pemKey, 'password', (err, key) => { - expect(err).to.not.exist() - expect(key.name).to.equal('imported-key') - expect(key.id).to.equal(rsaKeyInfo.id) - done() - }) + it('requires the key name', async () => { + const err = await ks.exportKey(undefined, 'password').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') }) - it('cannot be imported as an existing key name', (done) => { - ks.importKey(rsaKeyName, pemKey, 'password', (err, key) => { - expect(err).to.exist() - done() - }) + it('is a PKCS #8 encrypted pem', async () => { + pemKey = await ks.exportKey(rsaKeyName, 'password') + expect(pemKey).to.startsWith('-----BEGIN ENCRYPTED PRIVATE KEY-----') }) - it('cannot be imported with the wrong password', function (done) { - this.timeout(5 * 1000) - ks.importKey('a-new-name-for-import', pemKey, 'not the password', (err, key) => { - expect(err).to.exist() - done() - }) + it('can be imported', async () => { + const key = await ks.importKey('imported-key', pemKey, 'password') + expect(key.name).to.equal('imported-key') + expect(key.id).to.equal(rsaKeyInfo.id) + }) + + it('requires the pem', async () => { + const err = await ks.importKey('imported-key', undefined, 'password').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_PEM_REQUIRED') + }) + + it('cannot be imported as an existing key name', async () => { + const err = await ks.importKey(rsaKeyName, pemKey, 'password').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_KEY_ALREADY_EXISTS') + }) + + it('cannot be imported with the wrong password', async () => { + const err = await ks.importKey('a-new-name-for-import', pemKey, 'not the password').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_CANNOT_READ_KEY') }) }) @@ -309,136 +266,117 @@ module.exports = (datastore1, datastore2) => { const alicePrivKey = 
'CAASpgkwggSiAgEAAoIBAQC2SKo/HMFZeBml1AF3XijzrxrfQXdJzjePBZAbdxqKR1Mc6juRHXij6HXYPjlAk01BhF1S3Ll4Lwi0cAHhggf457sMg55UWyeGKeUv0ucgvCpBwlR5cQ020i0MgzjPWOLWq1rtvSbNcAi2ZEVn6+Q2EcHo3wUvWRtLeKz+DZSZfw2PEDC+DGPJPl7f8g7zl56YymmmzH9liZLNrzg/qidokUv5u1pdGrcpLuPNeTODk0cqKB+OUbuKj9GShYECCEjaybJDl9276oalL9ghBtSeEv20kugatTvYy590wFlJkkvyl+nPxIH0EEYMKK9XRWlu9XYnoSfboiwcv8M3SlsjAgMBAAECggEAZtju/bcKvKFPz0mkHiaJcpycy9STKphorpCT83srBVQi59CdFU6Mj+aL/xt0kCPMVigJw8P3/YCEJ9J+rS8BsoWE+xWUEsJvtXoT7vzPHaAtM3ci1HZd302Mz1+GgS8Epdx+7F5p80XAFLDUnELzOzKftvWGZmWfSeDnslwVONkL/1VAzwKy7Ce6hk4SxRE7l2NE2OklSHOzCGU1f78ZzVYKSnS5Ag9YrGjOAmTOXDbKNKN/qIorAQ1bovzGoCwx3iGIatQKFOxyVCyO1PsJYT7JO+kZbhBWRRE+L7l+ppPER9bdLFxs1t5CrKc078h+wuUr05S1P1JjXk68pk3+kQKBgQDeK8AR11373Mzib6uzpjGzgNRMzdYNuExWjxyxAzz53NAR7zrPHvXvfIqjDScLJ4NcRO2TddhXAfZoOPVH5k4PJHKLBPKuXZpWlookCAyENY7+Pd55S8r+a+MusrMagYNljb5WbVTgN8cgdpim9lbbIFlpN6SZaVjLQL3J8TWH6wKBgQDSChzItkqWX11CNstJ9zJyUE20I7LrpyBJNgG1gtvz3ZMUQCn3PxxHtQzN9n1P0mSSYs+jBKPuoSyYLt1wwe10/lpgL4rkKWU3/m1Myt0tveJ9WcqHh6tzcAbb/fXpUFT/o4SWDimWkPkuCb+8j//2yiXk0a/T2f36zKMuZvujqQKBgC6B7BAQDG2H2B/ijofp12ejJU36nL98gAZyqOfpLJ+FeMz4TlBDQ+phIMhnHXA5UkdDapQ+zA3SrFk+6yGk9Vw4Hf46B+82SvOrSbmnMa+PYqKYIvUzR4gg34rL/7AhwnbEyD5hXq4dHwMNsIDq+l2elPjwm/U9V0gdAl2+r50HAoGALtsKqMvhv8HucAMBPrLikhXP/8um8mMKFMrzfqZ+otxfHzlhI0L08Bo3jQrb0Z7ByNY6M8epOmbCKADsbWcVre/AAY0ZkuSZK/CaOXNX/AhMKmKJh8qAOPRY02LIJRBCpfS4czEdnfUhYV/TYiFNnKRj57PPYZdTzUsxa/yVTmECgYBr7slQEjb5Onn5mZnGDh+72BxLNdgwBkhO0OCdpdISqk0F0Pxby22DFOKXZEpiyI9XYP1C8wPiJsShGm2yEwBPWXnrrZNWczaVuCbXHrZkWQogBDG3HGXNdU4MAWCyiYlyinIBpPpoAJZSzpGLmWbMWh28+RJS6AQX6KHrK1o2uw==' let alice - before(function (done) { + before(async function () { const encoded = Buffer.from(alicePrivKey, 'base64') - PeerId.createFromPrivKey(encoded, (err, id) => { - expect(err).to.not.exist() - alice = id - done() - }) + alice = await PeerId.createFromPrivKey(encoded) }) - it('private key can be imported', (done) => { - ks.importPeer('alice', alice, (err, key) => { - expect(err).to.not.exist() - expect(key.name).to.equal('alice') - expect(key.id).to.equal(alice.toB58String()) - done() - }) + it('private key can be imported', async () => { + const key = await ks.importPeer('alice', alice) + expect(key.name).to.equal('alice') + expect(key.id).to.equal(alice.toB58String()) }) - it('key id exists', (done) => { - ks.findKeyById(alice.toB58String(), (err, key) => { - expect(err).to.not.exist() - expect(key).to.exist() - expect(key).to.have.property('name', 'alice') - expect(key).to.have.property('id', alice.toB58String()) - done() - }) + it('private key import requires a valid name', async () => { + const err = await ks.importPeer(undefined, alice).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') }) - it('key name exists', (done) => { - ks.findKeyByName('alice', (err, key) => { - expect(err).to.not.exist() - expect(key).to.exist() - expect(key).to.have.property('name', 'alice') - expect(key).to.have.property('id', alice.toB58String()) - done() - }) + it('private key import requires the peer', async () => { + const err = await ks.importPeer('alice').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_MISSING_PRIVATE_KEY') + }) + + it('key id exists', async () => { + const key = await ks.findKeyById(alice.toB58String()) + expect(key).to.exist() + expect(key).to.have.property('name', 'alice') + expect(key).to.have.property('id', alice.toB58String()) + }) + + it('key name exists', async () => { + const key = await 
ks.findKeyByName('alice') + expect(key).to.exist() + expect(key).to.have.property('name', 'alice') + expect(key).to.have.property('id', alice.toB58String()) }) }) describe('rename', () => { - it('requires an existing key name', (done) => { - ks.renameKey('not-there', renamedRsaKeyName, (err) => { - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_KEY_NOT_FOUND') - done() - }) + it('requires an existing key name', async () => { + const err = await ks.renameKey('not-there', renamedRsaKeyName).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_NOT_FOUND') }) - it('requires a valid new key name', (done) => { - ks.renameKey(rsaKeyName, '..\not-valid', (err) => { - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_NEW_KEY_NAME_INVALID') - done() - }) + it('requires a valid new key name', async () => { + const err = await ks.renameKey(rsaKeyName, '..\not-valid').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_NEW_KEY_NAME_INVALID') }) - it('does not overwrite existing key', (done) => { - ks.renameKey(rsaKeyName, rsaKeyName, (err) => { - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_KEY_ALREADY_EXISTS') - done() - }) + it('does not overwrite existing key', async () => { + const err = await ks.renameKey(rsaKeyName, rsaKeyName).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_KEY_ALREADY_EXISTS') }) - it('cannot create the "self" key', (done) => { - ks.renameKey(rsaKeyName, 'self', (err) => { - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_NEW_KEY_NAME_INVALID') - done() - }) + it('cannot create the "self" key', async () => { + const err = await ks.renameKey(rsaKeyName, 'self').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_NEW_KEY_NAME_INVALID') }) - it('removes the existing key name', (done) => { - ks.renameKey(rsaKeyName, renamedRsaKeyName, (err, key) => { - expect(err).to.not.exist() - expect(key).to.exist() - expect(key).to.have.property('name', renamedRsaKeyName) - expect(key).to.have.property('id', rsaKeyInfo.id) - ks.findKeyByName(rsaKeyName, (err, key) => { - expect(err).to.exist() - done() - }) - }) + it('removes the existing key name', async () => { + const key = await ks.renameKey(rsaKeyName, renamedRsaKeyName) + expect(key).to.exist() + expect(key).to.have.property('name', renamedRsaKeyName) + expect(key).to.have.property('id', rsaKeyInfo.id) + // Try to find the changed key + const err = await ks.findKeyByName(rsaKeyName).then(fail, err => err) + expect(err).to.exist() }) - it('creates the new key name', (done) => { - ks.findKeyByName(renamedRsaKeyName, (err, key) => { - expect(err).to.not.exist() - expect(key).to.exist() - expect(key).to.have.property('name', renamedRsaKeyName) - done() - }) + it('creates the new key name', async () => { + const key = await ks.findKeyByName(renamedRsaKeyName) + expect(key).to.exist() + expect(key).to.have.property('name', renamedRsaKeyName) }) - it('does not change the key ID', (done) => { - ks.findKeyByName(renamedRsaKeyName, (err, key) => { - expect(err).to.not.exist() - expect(key).to.exist() - expect(key).to.have.property('name', renamedRsaKeyName) - expect(key).to.have.property('id', rsaKeyInfo.id) - done() - }) + it('does not change the key ID', async () => { + const key = await ks.findKeyByName(renamedRsaKeyName) + expect(key).to.exist() + expect(key).to.have.property('name', renamedRsaKeyName) + 
expect(key).to.have.property('id', rsaKeyInfo.id) + }) + + it('throws with invalid key names', async () => { + const err = await ks.findKeyByName(undefined).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') }) }) describe('key removal', () => { - it('cannot remove the "self" key', (done) => { - ks.removeKey('self', (err) => { - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') - done() - }) + it('cannot remove the "self" key', async () => { + const err = await ks.removeKey('self').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') }) - it('cannot remove an unknown key', (done) => { - ks.removeKey('not-there', (err) => { - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_KEY_NOT_FOUND') - done() - }) + it('cannot remove an unknown key', async () => { + const err = await ks.removeKey('not-there').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_KEY_NOT_FOUND') }) - it('can remove a known key', (done) => { - ks.removeKey(renamedRsaKeyName, (err, key) => { - expect(err).to.not.exist() - expect(key).to.exist() - expect(key).to.have.property('name', renamedRsaKeyName) - expect(key).to.have.property('id', rsaKeyInfo.id) - done() - }) + it('can remove a known key', async () => { + const key = await ks.removeKey(renamedRsaKeyName) + expect(key).to.exist() + expect(key).to.have.property('name', renamedRsaKeyName) + expect(key).to.have.property('id', rsaKeyInfo.id) }) }) }) diff --git a/test/node.js b/test/node.js index e11d074431..f67b6b6489 100644 --- a/test/node.js +++ b/test/node.js @@ -3,8 +3,8 @@ const os = require('os') const path = require('path') -const rimraf = require('rimraf') -const series = require('async/series') +const promisify = require('promisify-es6') +const rimraf = promisify(require('rimraf')) const FsStore = require('datastore-fs') describe('node', () => { @@ -13,20 +13,16 @@ describe('node', () => { const datastore1 = new FsStore(store1) const datastore2 = new FsStore(store2) - before((done) => { - series([ - (cb) => datastore1.open(cb), - (cb) => datastore2.open(cb) - ], done) + before(async () => { + await datastore1.open() + await datastore2.open() }) - after((done) => { - series([ - (cb) => datastore1.close(cb), - (cb) => datastore2.close(cb), - (cb) => rimraf(store1, cb), - (cb) => rimraf(store2, cb) - ], done) + after(async () => { + await datastore1.close() + await datastore2.close() + await rimraf(store1) + await rimraf(store2) }) require('./keychain.spec')(datastore1, datastore2) diff --git a/test/peerid.js b/test/peerid.js index 7d6588cb85..74ba9bf678 100644 --- a/test/peerid.js +++ b/test/peerid.js @@ -21,55 +21,32 @@ describe('peer ID', () => { let peer let publicKeyDer // a buffer - before(function (done) { + before(async () => { const encoded = Buffer.from(sample.privKey, 'base64') - PeerId.createFromPrivKey(encoded, (err, id) => { - expect(err).to.not.exist() - peer = id - done() - }) + peer = await PeerId.createFromPrivKey(encoded) }) - it('decoded public key', (done) => { - // console.log('peer id', peer.toJSON()) - // console.log('id', peer.toB58String()) - // console.log('id decoded', multihash.decode(peer.id)) - + it('decoded public key', () => { // get protobuf version of the public key const publicKeyProtobuf = peer.marshalPubKey() const publicKey = crypto.keys.unmarshalPublicKey(publicKeyProtobuf) - // console.log('public key', publicKey) 
publicKeyDer = publicKey.marshal() - // console.log('public key der', publicKeyDer.toString('base64')) // get protobuf version of the private key const privateKeyProtobuf = peer.marshalPrivKey() - crypto.keys.unmarshalPrivateKey(privateKeyProtobuf, (err, key) => { - expect(err).to.not.exist() - // console.log('private key', key) - // console.log('\nprivate key der', key.marshal().toString('base64')) - done() - }) + const key = crypto.keys.unmarshalPrivateKey(privateKeyProtobuf) + expect(key).to.exist() }) - it('encoded public key with DER', (done) => { + it('encoded public key with DER', async () => { const jwk = rsaUtils.pkixToJwk(publicKeyDer) - // console.log('jwk', jwk) const rsa = new rsaClass.RsaPublicKey(jwk) - // console.log('rsa', rsa) - rsa.hash((err, keyId) => { - expect(err).to.not.exist() - // console.log('err', err) - // console.log('keyId', keyId) - // console.log('id decoded', multihash.decode(keyId)) - const kids = multihash.toB58String(keyId) - // console.log('id', kids) - expect(kids).to.equal(peer.toB58String()) - done() - }) + const keyId = await rsa.hash() + const kids = multihash.toB58String(keyId) + expect(kids).to.equal(peer.toB58String()) }) - it('encoded public key with JWT', (done) => { + it('encoded public key with JWT', async () => { const jwk = { kty: 'RSA', n: 'tkiqPxzBWXgZpdQBd14o868a30F3Sc43jwWQG3caikdTHOo7kR14o-h12D45QJNNQYRdUty5eC8ItHAB4YIH-Oe7DIOeVFsnhinlL9LnILwqQcJUeXENNtItDIM4z1ji1qta7b0mzXAItmRFZ-vkNhHB6N8FL1kbS3is_g2UmX8NjxAwvgxjyT5e3_IO85eemMpppsx_ZYmSza84P6onaJFL-btaXRq3KS7jzXkzg5NHKigfjlG7io_RkoWBAghI2smyQ5fdu-qGpS_YIQbUnhL9tJLoGrU72MufdMBZSZJL8pfpz8SB9BBGDCivV0VpbvV2J6En26IsHL_DN0pbIw', @@ -77,33 +54,16 @@ describe('peer ID', () => { alg: 'RS256', kid: '2011-04-29' } - // console.log('jwk', jwk) const rsa = new rsaClass.RsaPublicKey(jwk) - // console.log('rsa', rsa) - rsa.hash((err, keyId) => { - expect(err).to.not.exist() - // console.log('err', err) - // console.log('keyId', keyId) - // console.log('id decoded', multihash.decode(keyId)) - const kids = multihash.toB58String(keyId) - // console.log('id', kids) - expect(kids).to.equal(peer.toB58String()) - done() - }) + const keyId = await rsa.hash() + const kids = multihash.toB58String(keyId) + expect(kids).to.equal(peer.toB58String()) }) - it('decoded private key', (done) => { - // console.log('peer id', peer.toJSON()) - // console.log('id', peer.toB58String()) - // console.log('id decoded', multihash.decode(peer.id)) - + it('decoded private key', async () => { // get protobuf version of the private key const privateKeyProtobuf = peer.marshalPrivKey() - crypto.keys.unmarshalPrivateKey(privateKeyProtobuf, (err, key) => { - expect(err).to.not.exist() - // console.log('private key', key) - // console.log('\nprivate key der', key.marshal().toString('base64')) - done() - }) + const key = await crypto.keys.unmarshalPrivateKey(privateKeyProtobuf) + expect(key).to.exist() }) }) From e375c2f1e8b5f2972c5bc0156c209b9d7cecd606 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Fri, 16 Aug 2019 14:25:02 +0100 Subject: [PATCH 070/102] chore: update contributors --- package.json | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index eb3d3085c1..450281fdc2 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "libp2p-keychain", - "version": "0.4.2", + "version": "0.5.0", "description": "Key management and cryptographically protected messages", "leadMaintainer": "Vasco Santos ", "main": "src/index.js", @@ -66,11 +66,12 @@ "Alex Potsides ", "David 
Dias ", "Hugo Dias ", + "Jacob Heun ", "Maciej Krüger ", "Masahiro Saito ", "Richard Schneider ", - "Vasco Santos ", "Vasco Santos ", + "Vasco Santos ", "Victor Bjelkholm " ] } From ad378174f79fb0bfbda75c4b12eb817d2960d27c Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Fri, 16 Aug 2019 14:25:02 +0100 Subject: [PATCH 071/102] chore: release version v0.5.0 --- CHANGELOG.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4b40f5b154..c2bd4f3e90 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,18 @@ + +# [0.5.0](https://github.com/libp2p/js-libp2p-keychain/compare/v0.4.2...v0.5.0) (2019-08-16) + + +* refactor: use async/await instead of callbacks (#37) ([dda315a](https://github.com/libp2p/js-libp2p-keychain/commit/dda315a)), closes [#37](https://github.com/libp2p/js-libp2p-keychain/issues/37) + + +### BREAKING CHANGES + +* The api now uses async/await instead of callbacks. + +Co-Authored-By: Vasco Santos + + + ## [0.4.2](https://github.com/libp2p/js-libp2p-keychain/compare/v0.4.1...v0.4.2) (2019-06-13) From 893a2c975c098675f3841a13f2a6a3aa7e95ddf9 Mon Sep 17 00:00:00 2001 From: Alex Potsides Date: Wed, 25 Sep 2019 11:19:38 +0100 Subject: [PATCH 072/102] chore: downgrade peer-id to same version used by libp2p (#38) --- package.json | 4 ++-- src/keychain.js | 43 ++++++++++++++++++++++++++++++++----------- test/keychain.spec.js | 3 ++- test/node.js | 4 ++-- test/peerid.js | 21 +++++++++++++++------ 5 files changed, 53 insertions(+), 22 deletions(-) diff --git a/package.json b/package.json index 450281fdc2..8a86f81958 100644 --- a/package.json +++ b/package.json @@ -42,7 +42,7 @@ "dependencies": { "err-code": "^2.0.0", "interface-datastore": "^0.7.0", - "libp2p-crypto": "^0.17.0", + "libp2p-crypto": "^0.16.2", "merge-options": "^1.0.1", "node-forge": "^0.8.5", "sanitize-filename": "^1.6.1" @@ -56,7 +56,7 @@ "dirty-chai": "^2.0.1", "level": "^5.0.1", "multihashes": "^0.4.15", - "peer-id": "^0.13.2", + "peer-id": "^0.12.2", "promisify-es6": "^1.0.3", "rimraf": "^2.6.3" }, diff --git a/src/keychain.js b/src/keychain.js index 2f67345cda..7bd8ba34ac 100644 --- a/src/keychain.js +++ b/src/keychain.js @@ -5,6 +5,7 @@ const sanitize = require('sanitize-filename') const mergeOptions = require('merge-options') const crypto = require('libp2p-crypto') const DS = require('interface-datastore') +const promisify = require('promisify-es6') const CMS = require('./cms') const errcode = require('err-code') @@ -205,10 +206,16 @@ class Keychain { let keyInfo try { - const keypair = await crypto.keys.generateKeyPair(type, size) - - const kid = await keypair.id() - const pem = await keypair.export(this._()) + const keypair = await promisify(crypto.keys.generateKeyPair, { + context: crypto.keys + })(type, size) + + const kid = await promisify(keypair.id, { + context: keypair + })() + const pem = await promisify(keypair.export, { + context: keypair + })(this._()) keyInfo = { name: name, id: kid @@ -360,8 +367,12 @@ class Keychain { try { const res = await this.store.get(dsname) const pem = res.toString() - const privateKey = await crypto.keys.import(pem, this._()) - return privateKey.export(password) + const privateKey = await promisify(crypto.keys.import, { + context: crypto.keys + })(pem, this._()) + return promisify(privateKey.export, { + context: privateKey + })(password) } catch (err) { return throwDelayed(err) } @@ -389,15 +400,21 @@ class Keychain { let privateKey try { - privateKey = await crypto.keys.import(pem, password) + privateKey = await 
promisify(crypto.keys.import, { + context: crypto.keys + })(pem, password) } catch (err) { return throwDelayed(errcode(new Error('Cannot read the key, most likely the password is wrong'), 'ERR_CANNOT_READ_KEY')) } let kid try { - kid = await privateKey.id() - pem = await privateKey.export(this._()) + kid = await promisify(privateKey.id, { + context: privateKey + })() + pem = await promisify(privateKey.export, { + context: privateKey + })(this._()) } catch (err) { return throwDelayed(err) } @@ -429,8 +446,12 @@ class Keychain { if (exists) return throwDelayed(errcode(new Error(`Key '${name}' already exists`), 'ERR_KEY_ALREADY_EXISTS')) try { - const kid = await privateKey.id() - const pem = await privateKey.export(this._()) + const kid = await promisify(privateKey.id, { + context: privateKey + })() + const pem = await promisify(privateKey.export, { + context: privateKey + })(this._()) const keyInfo = { name: name, id: kid diff --git a/test/keychain.spec.js b/test/keychain.spec.js index c455f2d764..0756f5f252 100644 --- a/test/keychain.spec.js +++ b/test/keychain.spec.js @@ -9,6 +9,7 @@ chai.use(require('dirty-chai')) chai.use(require('chai-string')) const Keychain = require('../') const PeerId = require('peer-id') +const promisify = require('promisify-es6') module.exports = (datastore1, datastore2) => { describe('keychain', () => { @@ -268,7 +269,7 @@ module.exports = (datastore1, datastore2) => { before(async function () { const encoded = Buffer.from(alicePrivKey, 'base64') - alice = await PeerId.createFromPrivKey(encoded) + alice = await promisify(PeerId.createFromPrivKey)(encoded) }) it('private key can be imported', async () => { diff --git a/test/node.js b/test/node.js index f67b6b6489..bbb2508960 100644 --- a/test/node.js +++ b/test/node.js @@ -8,8 +8,8 @@ const rimraf = promisify(require('rimraf')) const FsStore = require('datastore-fs') describe('node', () => { - const store1 = path.join(os.tmpdir(), 'test-keystore-1') - const store2 = path.join(os.tmpdir(), 'test-keystore-2') + const store1 = path.join(os.tmpdir(), 'test-keystore-1-' + Date.now()) + const store2 = path.join(os.tmpdir(), 'test-keystore-2-' + Date.now()) const datastore1 = new FsStore(store1) const datastore2 = new FsStore(store2) diff --git a/test/peerid.js b/test/peerid.js index 74ba9bf678..42274db054 100644 --- a/test/peerid.js +++ b/test/peerid.js @@ -10,6 +10,7 @@ const multihash = require('multihashes') const crypto = require('libp2p-crypto') const rsaUtils = require('libp2p-crypto/src/keys/rsa-utils') const rsaClass = require('libp2p-crypto/src/keys/rsa-class') +const promisify = require('promisify-es6') const sample = { id: '122019318b6e5e0cf93a2314bf01269a2cc23cd3dcd452d742cdb9379d8646f6e4a9', @@ -23,10 +24,10 @@ describe('peer ID', () => { before(async () => { const encoded = Buffer.from(sample.privKey, 'base64') - peer = await PeerId.createFromPrivKey(encoded) + peer = await promisify(PeerId.createFromPrivKey)(encoded) }) - it('decoded public key', () => { + it('decoded public key', async () => { // get protobuf version of the public key const publicKeyProtobuf = peer.marshalPubKey() const publicKey = crypto.keys.unmarshalPublicKey(publicKeyProtobuf) @@ -34,14 +35,18 @@ describe('peer ID', () => { // get protobuf version of the private key const privateKeyProtobuf = peer.marshalPrivKey() - const key = crypto.keys.unmarshalPrivateKey(privateKeyProtobuf) + const key = await promisify(crypto.keys.unmarshalPrivateKey, { + context: crypto.keys + })(privateKeyProtobuf) expect(key).to.exist() }) it('encoded 
public key with DER', async () => { const jwk = rsaUtils.pkixToJwk(publicKeyDer) const rsa = new rsaClass.RsaPublicKey(jwk) - const keyId = await rsa.hash() + const keyId = await promisify(rsa.hash, { + context: rsa + })() const kids = multihash.toB58String(keyId) expect(kids).to.equal(peer.toB58String()) }) @@ -55,7 +60,9 @@ describe('peer ID', () => { kid: '2011-04-29' } const rsa = new rsaClass.RsaPublicKey(jwk) - const keyId = await rsa.hash() + const keyId = await promisify(rsa.hash, { + context: rsa + })() const kids = multihash.toB58String(keyId) expect(kids).to.equal(peer.toB58String()) }) @@ -63,7 +70,9 @@ describe('peer ID', () => { it('decoded private key', async () => { // get protobuf version of the private key const privateKeyProtobuf = peer.marshalPrivKey() - const key = await crypto.keys.unmarshalPrivateKey(privateKeyProtobuf) + const key = await promisify(crypto.keys.unmarshalPrivateKey, { + context: crypto.keys + })(privateKeyProtobuf) expect(key).to.exist() }) }) From b9eb9d7b4ae36dc1ad8657be3464a7bfe85008de Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Wed, 25 Sep 2019 12:33:28 +0200 Subject: [PATCH 073/102] chore: update contributors --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 8a86f81958..df74c12cfd 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "libp2p-keychain", - "version": "0.5.0", + "version": "0.5.1", "description": "Key management and cryptographically protected messages", "leadMaintainer": "Vasco Santos ", "main": "src/index.js", From ce8c412fb6c1ecc087ad332122826d2bd43d7c19 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Wed, 25 Sep 2019 12:33:28 +0200 Subject: [PATCH 074/102] chore: release version v0.5.1 --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index c2bd4f3e90..12598fe352 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ + +## [0.5.1](https://github.com/libp2p/js-libp2p-keychain/compare/v0.5.0...v0.5.1) (2019-09-25) + + + # [0.5.0](https://github.com/libp2p/js-libp2p-keychain/compare/v0.4.2...v0.5.0) (2019-08-16) From 8de96817edada969ce96741a8adff63042e6a48e Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Mon, 2 Dec 2019 16:08:08 +0000 Subject: [PATCH 075/102] chore: update node-forge dependency (#39) --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index df74c12cfd..cb4a9065e1 100644 --- a/package.json +++ b/package.json @@ -44,7 +44,7 @@ "interface-datastore": "^0.7.0", "libp2p-crypto": "^0.16.2", "merge-options": "^1.0.1", - "node-forge": "^0.8.5", + "node-forge": "^0.9.1", "sanitize-filename": "^1.6.1" }, "devDependencies": { From ff6bd50350b84fbf60592489e6f6c35e2400a748 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Mon, 2 Dec 2019 17:17:38 +0100 Subject: [PATCH 076/102] chore: update contributors --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index cb4a9065e1..ffed277873 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "libp2p-keychain", - "version": "0.5.1", + "version": "0.5.2", "description": "Key management and cryptographically protected messages", "leadMaintainer": "Vasco Santos ", "main": "src/index.js", From 163edbbe88d0c6610ce365130ad55203051332f9 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Mon, 2 Dec 2019 17:17:38 +0100 Subject: [PATCH 077/102] chore: release version v0.5.2 --- CHANGELOG.md | 5 +++++ 1 file changed, 5 
insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 12598fe352..dd36aa070e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ + +## [0.5.2](https://github.com/libp2p/js-libp2p-keychain/compare/v0.5.1...v0.5.2) (2019-12-02) + + + ## [0.5.1](https://github.com/libp2p/js-libp2p-keychain/compare/v0.5.0...v0.5.1) (2019-09-25) From b6d5313a550555b435bd301d811af75397dc91e4 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Wed, 18 Dec 2019 10:04:20 +0000 Subject: [PATCH 078/102] chore: update deps (#40) --- package.json | 14 +++++++------- src/keychain.js | 42 ++++++++++-------------------------------- test/keychain.spec.js | 3 +-- test/peerid.js | 19 +++++-------------- 4 files changed, 23 insertions(+), 55 deletions(-) diff --git a/package.json b/package.json index ffed277873..ff6dbc6d41 100644 --- a/package.json +++ b/package.json @@ -41,9 +41,9 @@ "homepage": "https://github.com/libp2p/js-libp2p-keychain#readme", "dependencies": { "err-code": "^2.0.0", - "interface-datastore": "^0.7.0", - "libp2p-crypto": "^0.16.2", - "merge-options": "^1.0.1", + "interface-datastore": "^0.8.0", + "libp2p-crypto": "^0.17.1", + "merge-options": "^2.0.0", "node-forge": "^0.9.1", "sanitize-filename": "^1.6.1" }, @@ -52,13 +52,13 @@ "chai": "^4.2.0", "chai-string": "^1.5.0", "datastore-fs": "^0.9.0", - "datastore-level": "^0.12.1", + "datastore-level": "^0.14.0", "dirty-chai": "^2.0.1", - "level": "^5.0.1", + "level": "^6.0.0", "multihashes": "^0.4.15", - "peer-id": "^0.12.2", + "peer-id": "^0.13.5", "promisify-es6": "^1.0.3", - "rimraf": "^2.6.3" + "rimraf": "^3.0.0" }, "contributors": [ "Alan Shaw ", diff --git a/src/keychain.js b/src/keychain.js index 7bd8ba34ac..aae7897224 100644 --- a/src/keychain.js +++ b/src/keychain.js @@ -5,7 +5,6 @@ const sanitize = require('sanitize-filename') const mergeOptions = require('merge-options') const crypto = require('libp2p-crypto') const DS = require('interface-datastore') -const promisify = require('promisify-es6') const CMS = require('./cms') const errcode = require('err-code') @@ -206,16 +205,9 @@ class Keychain { let keyInfo try { - const keypair = await promisify(crypto.keys.generateKeyPair, { - context: crypto.keys - })(type, size) - - const kid = await promisify(keypair.id, { - context: keypair - })() - const pem = await promisify(keypair.export, { - context: keypair - })(this._()) + const keypair = await crypto.keys.generateKeyPair(type, size) + const kid = await keypair.id() + const pem = await keypair.export(this._()) keyInfo = { name: name, id: kid @@ -367,12 +359,8 @@ class Keychain { try { const res = await this.store.get(dsname) const pem = res.toString() - const privateKey = await promisify(crypto.keys.import, { - context: crypto.keys - })(pem, this._()) - return promisify(privateKey.export, { - context: privateKey - })(password) + const privateKey = await crypto.keys.import(pem, this._()) + return privateKey.export(password) } catch (err) { return throwDelayed(err) } @@ -400,21 +388,15 @@ class Keychain { let privateKey try { - privateKey = await promisify(crypto.keys.import, { - context: crypto.keys - })(pem, password) + privateKey = await crypto.keys.import(pem, password) } catch (err) { return throwDelayed(errcode(new Error('Cannot read the key, most likely the password is wrong'), 'ERR_CANNOT_READ_KEY')) } let kid try { - kid = await promisify(privateKey.id, { - context: privateKey - })() - pem = await promisify(privateKey.export, { - context: privateKey - })(this._()) + kid = await privateKey.id() + pem = await 
privateKey.export(this._()) } catch (err) { return throwDelayed(err) } @@ -446,12 +428,8 @@ class Keychain { if (exists) return throwDelayed(errcode(new Error(`Key '${name}' already exists`), 'ERR_KEY_ALREADY_EXISTS')) try { - const kid = await promisify(privateKey.id, { - context: privateKey - })() - const pem = await promisify(privateKey.export, { - context: privateKey - })(this._()) + const kid = await privateKey.id() + const pem = await privateKey.export(this._()) const keyInfo = { name: name, id: kid diff --git a/test/keychain.spec.js b/test/keychain.spec.js index 0756f5f252..c455f2d764 100644 --- a/test/keychain.spec.js +++ b/test/keychain.spec.js @@ -9,7 +9,6 @@ chai.use(require('dirty-chai')) chai.use(require('chai-string')) const Keychain = require('../') const PeerId = require('peer-id') -const promisify = require('promisify-es6') module.exports = (datastore1, datastore2) => { describe('keychain', () => { @@ -269,7 +268,7 @@ module.exports = (datastore1, datastore2) => { before(async function () { const encoded = Buffer.from(alicePrivKey, 'base64') - alice = await promisify(PeerId.createFromPrivKey)(encoded) + alice = await PeerId.createFromPrivKey(encoded) }) it('private key can be imported', async () => { diff --git a/test/peerid.js b/test/peerid.js index 42274db054..4360e5388d 100644 --- a/test/peerid.js +++ b/test/peerid.js @@ -10,7 +10,6 @@ const multihash = require('multihashes') const crypto = require('libp2p-crypto') const rsaUtils = require('libp2p-crypto/src/keys/rsa-utils') const rsaClass = require('libp2p-crypto/src/keys/rsa-class') -const promisify = require('promisify-es6') const sample = { id: '122019318b6e5e0cf93a2314bf01269a2cc23cd3dcd452d742cdb9379d8646f6e4a9', @@ -24,7 +23,7 @@ describe('peer ID', () => { before(async () => { const encoded = Buffer.from(sample.privKey, 'base64') - peer = await promisify(PeerId.createFromPrivKey)(encoded) + peer = await PeerId.createFromPrivKey(encoded) }) it('decoded public key', async () => { @@ -35,18 +34,14 @@ describe('peer ID', () => { // get protobuf version of the private key const privateKeyProtobuf = peer.marshalPrivKey() - const key = await promisify(crypto.keys.unmarshalPrivateKey, { - context: crypto.keys - })(privateKeyProtobuf) + const key = await crypto.keys.unmarshalPrivateKey(privateKeyProtobuf) expect(key).to.exist() }) it('encoded public key with DER', async () => { const jwk = rsaUtils.pkixToJwk(publicKeyDer) const rsa = new rsaClass.RsaPublicKey(jwk) - const keyId = await promisify(rsa.hash, { - context: rsa - })() + const keyId = await rsa.hash() const kids = multihash.toB58String(keyId) expect(kids).to.equal(peer.toB58String()) }) @@ -60,9 +55,7 @@ describe('peer ID', () => { kid: '2011-04-29' } const rsa = new rsaClass.RsaPublicKey(jwk) - const keyId = await promisify(rsa.hash, { - context: rsa - })() + const keyId = await rsa.hash() const kids = multihash.toB58String(keyId) expect(kids).to.equal(peer.toB58String()) }) @@ -70,9 +63,7 @@ describe('peer ID', () => { it('decoded private key', async () => { // get protobuf version of the private key const privateKeyProtobuf = peer.marshalPrivKey() - const key = await promisify(crypto.keys.unmarshalPrivateKey, { - context: crypto.keys - })(privateKeyProtobuf) + const key = await crypto.keys.unmarshalPrivateKey(privateKeyProtobuf) expect(key).to.exist() }) }) From 8ff68d1c502e4b46264fc9ba1380e0c34c96c4dd Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Wed, 18 Dec 2019 10:13:05 +0000 Subject: [PATCH 079/102] chore: update contributors --- package.json | 2 +- 
1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index ff6dbc6d41..fe63067565 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "libp2p-keychain", - "version": "0.5.2", + "version": "0.5.3", "description": "Key management and cryptographically protected messages", "leadMaintainer": "Vasco Santos ", "main": "src/index.js", From be63323cef833672b585f4bab93e923138791179 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Wed, 18 Dec 2019 10:13:05 +0000 Subject: [PATCH 080/102] chore: release version v0.5.3 --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index dd36aa070e..6a11dbf9d1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ + +## [0.5.3](https://github.com/libp2p/js-libp2p-keychain/compare/v0.5.2...v0.5.3) (2019-12-18) + + + ## [0.5.2](https://github.com/libp2p/js-libp2p-keychain/compare/v0.5.1...v0.5.2) (2019-12-02) From 6b9516cb3c56c836f207fd267e561727b4710b31 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Wed, 18 Dec 2019 16:46:28 +0000 Subject: [PATCH 081/102] Revert "chore: update deps (#40)" This reverts commit b6d5313a550555b435bd301d811af75397dc91e4. --- package.json | 14 +++++++------- src/keychain.js | 42 ++++++++++++++++++++++++++++++++---------- test/keychain.spec.js | 3 ++- test/peerid.js | 19 ++++++++++++++----- 4 files changed, 55 insertions(+), 23 deletions(-) diff --git a/package.json b/package.json index fe63067565..56359fec60 100644 --- a/package.json +++ b/package.json @@ -41,9 +41,9 @@ "homepage": "https://github.com/libp2p/js-libp2p-keychain#readme", "dependencies": { "err-code": "^2.0.0", - "interface-datastore": "^0.8.0", - "libp2p-crypto": "^0.17.1", - "merge-options": "^2.0.0", + "interface-datastore": "^0.7.0", + "libp2p-crypto": "^0.16.2", + "merge-options": "^1.0.1", "node-forge": "^0.9.1", "sanitize-filename": "^1.6.1" }, @@ -52,13 +52,13 @@ "chai": "^4.2.0", "chai-string": "^1.5.0", "datastore-fs": "^0.9.0", - "datastore-level": "^0.14.0", + "datastore-level": "^0.12.1", "dirty-chai": "^2.0.1", - "level": "^6.0.0", + "level": "^5.0.1", "multihashes": "^0.4.15", - "peer-id": "^0.13.5", + "peer-id": "^0.12.2", "promisify-es6": "^1.0.3", - "rimraf": "^3.0.0" + "rimraf": "^2.6.3" }, "contributors": [ "Alan Shaw ", diff --git a/src/keychain.js b/src/keychain.js index aae7897224..7bd8ba34ac 100644 --- a/src/keychain.js +++ b/src/keychain.js @@ -5,6 +5,7 @@ const sanitize = require('sanitize-filename') const mergeOptions = require('merge-options') const crypto = require('libp2p-crypto') const DS = require('interface-datastore') +const promisify = require('promisify-es6') const CMS = require('./cms') const errcode = require('err-code') @@ -205,9 +206,16 @@ class Keychain { let keyInfo try { - const keypair = await crypto.keys.generateKeyPair(type, size) - const kid = await keypair.id() - const pem = await keypair.export(this._()) + const keypair = await promisify(crypto.keys.generateKeyPair, { + context: crypto.keys + })(type, size) + + const kid = await promisify(keypair.id, { + context: keypair + })() + const pem = await promisify(keypair.export, { + context: keypair + })(this._()) keyInfo = { name: name, id: kid @@ -359,8 +367,12 @@ class Keychain { try { const res = await this.store.get(dsname) const pem = res.toString() - const privateKey = await crypto.keys.import(pem, this._()) - return privateKey.export(password) + const privateKey = await promisify(crypto.keys.import, { + context: crypto.keys + })(pem, this._()) + return 
promisify(privateKey.export, { + context: privateKey + })(password) } catch (err) { return throwDelayed(err) } @@ -388,15 +400,21 @@ class Keychain { let privateKey try { - privateKey = await crypto.keys.import(pem, password) + privateKey = await promisify(crypto.keys.import, { + context: crypto.keys + })(pem, password) } catch (err) { return throwDelayed(errcode(new Error('Cannot read the key, most likely the password is wrong'), 'ERR_CANNOT_READ_KEY')) } let kid try { - kid = await privateKey.id() - pem = await privateKey.export(this._()) + kid = await promisify(privateKey.id, { + context: privateKey + })() + pem = await promisify(privateKey.export, { + context: privateKey + })(this._()) } catch (err) { return throwDelayed(err) } @@ -428,8 +446,12 @@ class Keychain { if (exists) return throwDelayed(errcode(new Error(`Key '${name}' already exists`), 'ERR_KEY_ALREADY_EXISTS')) try { - const kid = await privateKey.id() - const pem = await privateKey.export(this._()) + const kid = await promisify(privateKey.id, { + context: privateKey + })() + const pem = await promisify(privateKey.export, { + context: privateKey + })(this._()) const keyInfo = { name: name, id: kid diff --git a/test/keychain.spec.js b/test/keychain.spec.js index c455f2d764..0756f5f252 100644 --- a/test/keychain.spec.js +++ b/test/keychain.spec.js @@ -9,6 +9,7 @@ chai.use(require('dirty-chai')) chai.use(require('chai-string')) const Keychain = require('../') const PeerId = require('peer-id') +const promisify = require('promisify-es6') module.exports = (datastore1, datastore2) => { describe('keychain', () => { @@ -268,7 +269,7 @@ module.exports = (datastore1, datastore2) => { before(async function () { const encoded = Buffer.from(alicePrivKey, 'base64') - alice = await PeerId.createFromPrivKey(encoded) + alice = await promisify(PeerId.createFromPrivKey)(encoded) }) it('private key can be imported', async () => { diff --git a/test/peerid.js b/test/peerid.js index 4360e5388d..42274db054 100644 --- a/test/peerid.js +++ b/test/peerid.js @@ -10,6 +10,7 @@ const multihash = require('multihashes') const crypto = require('libp2p-crypto') const rsaUtils = require('libp2p-crypto/src/keys/rsa-utils') const rsaClass = require('libp2p-crypto/src/keys/rsa-class') +const promisify = require('promisify-es6') const sample = { id: '122019318b6e5e0cf93a2314bf01269a2cc23cd3dcd452d742cdb9379d8646f6e4a9', @@ -23,7 +24,7 @@ describe('peer ID', () => { before(async () => { const encoded = Buffer.from(sample.privKey, 'base64') - peer = await PeerId.createFromPrivKey(encoded) + peer = await promisify(PeerId.createFromPrivKey)(encoded) }) it('decoded public key', async () => { @@ -34,14 +35,18 @@ describe('peer ID', () => { // get protobuf version of the private key const privateKeyProtobuf = peer.marshalPrivKey() - const key = await crypto.keys.unmarshalPrivateKey(privateKeyProtobuf) + const key = await promisify(crypto.keys.unmarshalPrivateKey, { + context: crypto.keys + })(privateKeyProtobuf) expect(key).to.exist() }) it('encoded public key with DER', async () => { const jwk = rsaUtils.pkixToJwk(publicKeyDer) const rsa = new rsaClass.RsaPublicKey(jwk) - const keyId = await rsa.hash() + const keyId = await promisify(rsa.hash, { + context: rsa + })() const kids = multihash.toB58String(keyId) expect(kids).to.equal(peer.toB58String()) }) @@ -55,7 +60,9 @@ describe('peer ID', () => { kid: '2011-04-29' } const rsa = new rsaClass.RsaPublicKey(jwk) - const keyId = await rsa.hash() + const keyId = await promisify(rsa.hash, { + context: rsa + })() const kids = 
multihash.toB58String(keyId) expect(kids).to.equal(peer.toB58String()) }) @@ -63,7 +70,9 @@ describe('peer ID', () => { it('decoded private key', async () => { // get protobuf version of the private key const privateKeyProtobuf = peer.marshalPrivKey() - const key = await crypto.keys.unmarshalPrivateKey(privateKeyProtobuf) + const key = await promisify(crypto.keys.unmarshalPrivateKey, { + context: crypto.keys + })(privateKeyProtobuf) expect(key).to.exist() }) }) From 66c1fb37b617578bb7f9512d774629fe3b5c0572 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Wed, 18 Dec 2019 16:52:29 +0000 Subject: [PATCH 082/102] chore: update contributors --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 56359fec60..a3e7fbdcbd 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "libp2p-keychain", - "version": "0.5.3", + "version": "0.5.4", "description": "Key management and cryptographically protected messages", "leadMaintainer": "Vasco Santos ", "main": "src/index.js", From 0d13a8b729e22e3a30084296f900e11026dcdc24 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Wed, 18 Dec 2019 16:52:29 +0000 Subject: [PATCH 083/102] chore: release version v0.5.4 --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6a11dbf9d1..71becfb43a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ + +## [0.5.4](https://github.com/libp2p/js-libp2p-keychain/compare/v0.5.3...v0.5.4) (2019-12-18) + + + ## [0.5.3](https://github.com/libp2p/js-libp2p-keychain/compare/v0.5.2...v0.5.3) (2019-12-18) From 464fcbeddf599f9dc561a451fb57be8a022be657 Mon Sep 17 00:00:00 2001 From: Alan Shaw Date: Wed, 18 Dec 2019 10:04:20 +0000 Subject: [PATCH 084/102] chore: update deps (#40) --- package.json | 14 +++++++------- src/keychain.js | 42 ++++++++++-------------------------------- test/keychain.spec.js | 3 +-- test/peerid.js | 19 +++++-------------- 4 files changed, 23 insertions(+), 55 deletions(-) diff --git a/package.json b/package.json index a3e7fbdcbd..90b6f34cee 100644 --- a/package.json +++ b/package.json @@ -41,9 +41,9 @@ "homepage": "https://github.com/libp2p/js-libp2p-keychain#readme", "dependencies": { "err-code": "^2.0.0", - "interface-datastore": "^0.7.0", - "libp2p-crypto": "^0.16.2", - "merge-options": "^1.0.1", + "interface-datastore": "^0.8.0", + "libp2p-crypto": "^0.17.1", + "merge-options": "^2.0.0", "node-forge": "^0.9.1", "sanitize-filename": "^1.6.1" }, @@ -52,13 +52,13 @@ "chai": "^4.2.0", "chai-string": "^1.5.0", "datastore-fs": "^0.9.0", - "datastore-level": "^0.12.1", + "datastore-level": "^0.14.0", "dirty-chai": "^2.0.1", - "level": "^5.0.1", + "level": "^6.0.0", "multihashes": "^0.4.15", - "peer-id": "^0.12.2", + "peer-id": "^0.13.5", "promisify-es6": "^1.0.3", - "rimraf": "^2.6.3" + "rimraf": "^3.0.0" }, "contributors": [ "Alan Shaw ", diff --git a/src/keychain.js b/src/keychain.js index 7bd8ba34ac..aae7897224 100644 --- a/src/keychain.js +++ b/src/keychain.js @@ -5,7 +5,6 @@ const sanitize = require('sanitize-filename') const mergeOptions = require('merge-options') const crypto = require('libp2p-crypto') const DS = require('interface-datastore') -const promisify = require('promisify-es6') const CMS = require('./cms') const errcode = require('err-code') @@ -206,16 +205,9 @@ class Keychain { let keyInfo try { - const keypair = await promisify(crypto.keys.generateKeyPair, { - context: crypto.keys - })(type, size) - - const kid = await promisify(keypair.id, { - context: keypair 
- })() - const pem = await promisify(keypair.export, { - context: keypair - })(this._()) + const keypair = await crypto.keys.generateKeyPair(type, size) + const kid = await keypair.id() + const pem = await keypair.export(this._()) keyInfo = { name: name, id: kid @@ -367,12 +359,8 @@ class Keychain { try { const res = await this.store.get(dsname) const pem = res.toString() - const privateKey = await promisify(crypto.keys.import, { - context: crypto.keys - })(pem, this._()) - return promisify(privateKey.export, { - context: privateKey - })(password) + const privateKey = await crypto.keys.import(pem, this._()) + return privateKey.export(password) } catch (err) { return throwDelayed(err) } @@ -400,21 +388,15 @@ class Keychain { let privateKey try { - privateKey = await promisify(crypto.keys.import, { - context: crypto.keys - })(pem, password) + privateKey = await crypto.keys.import(pem, password) } catch (err) { return throwDelayed(errcode(new Error('Cannot read the key, most likely the password is wrong'), 'ERR_CANNOT_READ_KEY')) } let kid try { - kid = await promisify(privateKey.id, { - context: privateKey - })() - pem = await promisify(privateKey.export, { - context: privateKey - })(this._()) + kid = await privateKey.id() + pem = await privateKey.export(this._()) } catch (err) { return throwDelayed(err) } @@ -446,12 +428,8 @@ class Keychain { if (exists) return throwDelayed(errcode(new Error(`Key '${name}' already exists`), 'ERR_KEY_ALREADY_EXISTS')) try { - const kid = await promisify(privateKey.id, { - context: privateKey - })() - const pem = await promisify(privateKey.export, { - context: privateKey - })(this._()) + const kid = await privateKey.id() + const pem = await privateKey.export(this._()) const keyInfo = { name: name, id: kid diff --git a/test/keychain.spec.js b/test/keychain.spec.js index 0756f5f252..c455f2d764 100644 --- a/test/keychain.spec.js +++ b/test/keychain.spec.js @@ -9,7 +9,6 @@ chai.use(require('dirty-chai')) chai.use(require('chai-string')) const Keychain = require('../') const PeerId = require('peer-id') -const promisify = require('promisify-es6') module.exports = (datastore1, datastore2) => { describe('keychain', () => { @@ -269,7 +268,7 @@ module.exports = (datastore1, datastore2) => { before(async function () { const encoded = Buffer.from(alicePrivKey, 'base64') - alice = await promisify(PeerId.createFromPrivKey)(encoded) + alice = await PeerId.createFromPrivKey(encoded) }) it('private key can be imported', async () => { diff --git a/test/peerid.js b/test/peerid.js index 42274db054..4360e5388d 100644 --- a/test/peerid.js +++ b/test/peerid.js @@ -10,7 +10,6 @@ const multihash = require('multihashes') const crypto = require('libp2p-crypto') const rsaUtils = require('libp2p-crypto/src/keys/rsa-utils') const rsaClass = require('libp2p-crypto/src/keys/rsa-class') -const promisify = require('promisify-es6') const sample = { id: '122019318b6e5e0cf93a2314bf01269a2cc23cd3dcd452d742cdb9379d8646f6e4a9', @@ -24,7 +23,7 @@ describe('peer ID', () => { before(async () => { const encoded = Buffer.from(sample.privKey, 'base64') - peer = await promisify(PeerId.createFromPrivKey)(encoded) + peer = await PeerId.createFromPrivKey(encoded) }) it('decoded public key', async () => { @@ -35,18 +34,14 @@ describe('peer ID', () => { // get protobuf version of the private key const privateKeyProtobuf = peer.marshalPrivKey() - const key = await promisify(crypto.keys.unmarshalPrivateKey, { - context: crypto.keys - })(privateKeyProtobuf) + const key = await 
crypto.keys.unmarshalPrivateKey(privateKeyProtobuf) expect(key).to.exist() }) it('encoded public key with DER', async () => { const jwk = rsaUtils.pkixToJwk(publicKeyDer) const rsa = new rsaClass.RsaPublicKey(jwk) - const keyId = await promisify(rsa.hash, { - context: rsa - })() + const keyId = await rsa.hash() const kids = multihash.toB58String(keyId) expect(kids).to.equal(peer.toB58String()) }) @@ -60,9 +55,7 @@ describe('peer ID', () => { kid: '2011-04-29' } const rsa = new rsaClass.RsaPublicKey(jwk) - const keyId = await promisify(rsa.hash, { - context: rsa - })() + const keyId = await rsa.hash() const kids = multihash.toB58String(keyId) expect(kids).to.equal(peer.toB58String()) }) @@ -70,9 +63,7 @@ describe('peer ID', () => { it('decoded private key', async () => { // get protobuf version of the private key const privateKeyProtobuf = peer.marshalPrivKey() - const key = await promisify(crypto.keys.unmarshalPrivateKey, { - context: crypto.keys - })(privateKeyProtobuf) + const key = await crypto.keys.unmarshalPrivateKey(privateKeyProtobuf) expect(key).to.exist() }) }) From 24e10f378b0c53df198149c1f9e9d8d4cb90b07a Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Wed, 18 Dec 2019 16:58:32 +0000 Subject: [PATCH 085/102] chore: update contributors --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 90b6f34cee..29b8873a60 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "libp2p-keychain", - "version": "0.5.4", + "version": "0.6.0", "description": "Key management and cryptographically protected messages", "leadMaintainer": "Vasco Santos ", "main": "src/index.js", From 44a1e7c709e71aae37f968305340d9ccfbd4ee49 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Wed, 18 Dec 2019 16:58:32 +0000 Subject: [PATCH 086/102] chore: release version v0.6.0 --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 71becfb43a..f661d419b0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ + +# [0.6.0](https://github.com/libp2p/js-libp2p-keychain/compare/v0.5.4...v0.6.0) (2019-12-18) + + + ## [0.5.4](https://github.com/libp2p/js-libp2p-keychain/compare/v0.5.3...v0.5.4) (2019-12-18) From 55fb5d5364cbd55caf31bd4b91620912301e4c6b Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Thu, 20 Feb 2020 09:28:23 -0500 Subject: [PATCH 087/102] chore(deps-dev): bump aegir from 20.6.1 to 21.2.0 (#44) Bumps [aegir](https://github.com/ipfs/aegir) from 20.6.1 to 21.2.0. 
- [Release notes](https://github.com/ipfs/aegir/releases) - [Changelog](https://github.com/ipfs/aegir/blob/master/CHANGELOG.md) - [Commits](https://github.com/ipfs/aegir/compare/v20.6.1...v21.2.0) Signed-off-by: dependabot-preview[bot] --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 29b8873a60..43ce186a25 100644 --- a/package.json +++ b/package.json @@ -48,7 +48,7 @@ "sanitize-filename": "^1.6.1" }, "devDependencies": { - "aegir": "^20.0.0", + "aegir": "^21.2.0", "chai": "^4.2.0", "chai-string": "^1.5.0", "datastore-fs": "^0.9.0", From be45fc498fac50c97651dabd82a1071f0f5f78a7 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Thu, 9 Apr 2020 16:07:18 +0200 Subject: [PATCH 088/102] feat: address and proto books (#590) * feat: address and proto books * chore: apply suggestions from code review Co-Authored-By: Jacob Heun * chore: minor fixes and initial tests added * chore: integrate new peer-store with code using adapters for other modules * chore: do not use peerstore.put on get-peer-info * chore: apply suggestions from code review Co-Authored-By: Jacob Heun * chore: add new peer store tests * chore: apply suggestions from code review Co-Authored-By: Jacob Heun Co-authored-by: Jacob Heun --- doc/API.md | 409 +++++++++++++++++++-- src/dialer/index.js | 36 +- src/get-peer-info.js | 7 +- src/identify/index.js | 55 +-- src/index.js | 15 +- src/peer-store/README.md | 90 ++++- src/peer-store/address-book.js | 214 +++++++++++ src/peer-store/book.js | 87 +++++ src/peer-store/index.js | 322 ++++++++-------- src/peer-store/proto-book.js | 137 +++++++ src/ping/index.js | 4 +- src/registrar.js | 26 +- src/upgrader.js | 4 +- test/content-routing/dht/operation.node.js | 7 +- test/dialing/direct.node.js | 17 +- test/dialing/direct.spec.js | 46 ++- test/dialing/relay.node.js | 7 +- test/identify/index.spec.js | 57 ++- test/peer-discovery/index.spec.js | 8 +- test/peer-store/address-book.spec.js | 365 ++++++++++++++++++ test/peer-store/peer-store.spec.js | 244 ++++++------ test/peer-store/proto-book.spec.js | 310 ++++++++++++++++ test/registrar/registrar.spec.js | 18 +- 23 files changed, 2012 insertions(+), 473 deletions(-) create mode 100644 src/peer-store/address-book.js create mode 100644 src/peer-store/book.js create mode 100644 src/peer-store/proto-book.js create mode 100644 test/peer-store/address-book.spec.js create mode 100644 test/peer-store/proto-book.spec.js diff --git a/doc/API.md b/doc/API.md index 3f12f22dfb..bfaa9f778b 100644 --- a/doc/API.md +++ b/doc/API.md @@ -17,6 +17,18 @@ * [`contentRouting.put`](#contentroutingput) * [`contentRouting.get`](#contentroutingget) * [`contentRouting.getMany`](#contentroutinggetmany) + * [`peerStore.addressBook.add`](#peerstoreaddressbookadd) + * [`peerStore.addressBook.delete`](#peerstoreaddressbookdelete) + * [`peerStore.addressBook.get`](#peerstoreaddressbookget) + * [`peerStore.addressBook.getMultiaddrsForPeer`](#peerstoreaddressbookgetmultiaddrsforpeer) + * [`peerStore.addressBook.set`](#peerstoreaddressbookset) + * [`peerStore.protoBook.add`](#peerstoreprotobookadd) + * [`peerStore.protoBook.delete`](#peerstoreprotobookdelete) + * [`peerStore.protoBook.get`](#peerstoreprotobookget) + * [`peerStore.protoBook.set`](#peerstoreprotobookset) + * [`peerStore.delete`](#peerstoredelete) + * [`peerStore.get`](#peerstoreget) + * [`peerStore.peers`](#peerstorepeers) * [`pubsub.getSubscribers`](#pubsubgetsubscribers) * [`pubsub.getTopics`](#pubsubgettopics) * [`pubsub.publish`](#pubsubpublish) @@ 
-44,13 +56,13 @@ Creates an instance of Libp2p. | Name | Type | Description | |------|------|-------------| -| options | `Object` | libp2p options | -| options.modules | `Array` | libp2p modules to use | -| [options.config] | `Object` | libp2p modules configuration and core configuration | -| [options.connectionManager] | `Object` | libp2p Connection Manager configuration | -| [options.datastore] | `Object` | must implement [ipfs/interface-datastore](https://github.com/ipfs/interface-datastore) (in memory datastore will be used if not provided) | -| [options.dialer] | `Object` | libp2p Dialer configuration -| [options.metrics] | `Object` | libp2p Metrics configuration +| options | `object` | libp2p options | +| options.modules | `Array` | libp2p modules to use | +| [options.config] | `object` | libp2p modules configuration and core configuration | +| [options.connectionManager] | `object` | libp2p Connection Manager configuration | +| [options.datastore] | `object` | must implement [ipfs/interface-datastore](https://github.com/ipfs/interface-datastore) (in memory datastore will be used if not provided) | +| [options.dialer] | `object` | libp2p Dialer configuration +| [options.metrics] | `object` | libp2p Metrics configuration | [options.peerInfo] | [`PeerInfo`][peer-info] | peerInfo instance (it will be created if not provided) | For Libp2p configurations and modules details read the [Configuration Document](./CONFIGURATION.md). @@ -181,7 +193,7 @@ for (const [peerId, connections] of libp2p.connections) { | Name | Type | Description | |------|------|-------------| | peer | [`PeerInfo`][peer-info]\|[`PeerId`][peer-id]\|[`Multiaddr`][multiaddr]\|`string` | The peer to dial. If a [`Multiaddr`][multiaddr] or its string is provided, it **must** include the peer id | -| [options] | `Object` | dial options | +| [options] | `object` | dial options | | [options.signal] | [`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal) | An `AbortSignal` instance obtained from an [`AbortController`](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) that can be used to abort the connection before it completes | #### Returns @@ -216,8 +228,8 @@ Dials to another peer in the network and selects a protocol to communicate with | Name | Type | Description | |------|------|-------------| | peer | [`PeerInfo`][peer-info]\|[`PeerId`][peer-id]\|[`Multiaddr`][multiaddr]\|`string` | The peer to dial. If a [`Multiaddr`][multiaddr] or its string is provided, it **must** include the peer id | -| protocols | `String|Array` | A list of protocols (or single protocol) to negotiate with. Protocols are attempted in order until a match is made. (e.g '/ipfs/bitswap/1.1.0') | -| [options] | `Object` | dial options | +| protocols | `string|Array` | A list of protocols (or single protocol) to negotiate with. Protocols are attempted in order until a match is made. 
(e.g '/ipfs/bitswap/1.1.0') | +| [options] | `object` | dial options | | [options.signal] | [`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal) | An `AbortSignal` instance obtained from an [`AbortController`](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) that can be used to abort the connection before it completes | #### Returns @@ -275,7 +287,7 @@ In the event of a new handler for the same protocol being added, the first one i | Name | Type | Description | |------|------|-------------| -| protocols | `Array|String` | protocols to register | +| protocols | `Array|string` | protocols to register | | handler | `function({ connection:*, stream:*, protocol:string })` | handler to call | @@ -300,7 +312,7 @@ Unregisters all handlers with the given protocols | Name | Type | Description | |------|------|-------------| -| protocols | `Array|String` | protocols to unregister | +| protocols | `Array|string` | protocols to unregister | #### Example @@ -345,7 +357,7 @@ Iterates over all peer routers in series to find the given peer. If the DHT is e | Name | Type | Description | |------|------|-------------| | peerId | [`PeerId`][peer-id] | ID of the peer to find | -| options | `Object` | operation options | +| options | `object` | operation options | | options.timeout | `number` | maximum time the query should run | #### Returns @@ -373,7 +385,7 @@ Once a content router succeeds, the iteration will stop. If the DHT is enabled, | Name | Type | Description | |------|------|-------------| | cid | [`CID`][cid] | cid to find | -| options | `Object` | operation options | +| options | `object` | operation options | | options.timeout | `number` | maximum time the query should run | | options.maxNumProviders | `number` | maximum number of providers to find | @@ -427,9 +439,9 @@ Writes a value to a key in the DHT. | Name | Type | Description | |------|------|-------------| -| key | `String` | key to add to the dht | +| key | `string` | key to add to the dht | | value | `Buffer` | value to add to the dht | -| [options] | `Object` | put options | +| [options] | `object` | put options | | [options.minPeers] | `number` | minimum number of peers required to successfully put (default: closestPeers.length) | #### Returns @@ -458,8 +470,8 @@ Queries the DHT for a value stored for a given key. | Name | Type | Description | |------|------|-------------| -| key | `String` | key to get from the dht | -| [options] | `Object` | get options | +| key | `string` | key to get from the dht | +| [options] | `object` | get options | | [options.timeout] | `number` | maximum time the query should run | #### Returns @@ -487,9 +499,9 @@ Queries the DHT for the n values stored for the given key (without sorting). | Name | Type | Description | |------|------|-------------| -| key | `String` | key to get from the dht | +| key | `string` | key to get from the dht | | nvals | `number` | number of values aimed | -| [options] | `Object` | get options | +| [options] | `object` | get options | | [options.timeout] | `number` | maximum time the query should run | #### Returns @@ -507,6 +519,348 @@ const key = '/key' const { from, val } = await libp2p.contentRouting.get(key) ``` +### peerStore.addressBook.add + +Adds known `multiaddrs` of a given peer. If the peer is not known, it will be set with the provided multiaddrs. 
+ +`peerStore.addressBook.add(peerId, multiaddrs)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| peerId | [`PeerId`][peer-id] | peerId to set | +| multiaddrs | |`Array` | [`Multiaddrs`][multiaddr] to add | + +#### Returns + +| Type | Description | +|------|-------------| +| `AddressBook` | Returns the Address Book component | + +#### Example + +```js +peerStore.addressBook.add(peerId, multiaddr) +``` + +### peerStore.addressBook.delete + +Delete the provided peer from the book. + +`peerStore.addressBook.delete(peerId)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| peerId | [`PeerId`][peer-id] | peerId to remove | + +#### Returns + +| Type | Description | +|------|-------------| +| `boolean` | true if found and removed | + +#### Example + +```js +peerStore.addressBook.delete(peerId) +// false +peerStore.addressBook.set(peerId, multiaddr) +peerStore.addressBook.delete(peerId) +// true +``` + +### peerStore.addressBook.get + +Get the known [`MultiaddrInfos`][multiaddr-info] of a provided peer. + +`peerStore.addressBook.get(peerId)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| peerId | [`PeerId`][peer-id] | peerId to get | + +#### Returns + +| Type | Description | +|------|-------------| +| `Array` | Array of peer's multiaddr with their relevant information [`MultiaddrInfo`][multiaddr-info] | + +#### Example + +```js +peerStore.addressBook.get(peerId) +// undefined +peerStore.addressBook.set(peerId, multiaddr) +peerStore.addressBook.get(peerId) +// [ +// { +// multiaddr: /ip4/140.10.2.1/tcp/8000, +// ... +// }, +// { +// multiaddr: /ip4/140.10.2.1/ws/8001 +// ... +// }, +// ] +``` + +## peerStore.addressBook.getMultiaddrsForPeer + +Get the known `Multiaddr` of a provided peer. All returned multiaddrs will include the encapsulated `PeerId` of the peer. + +`peerStore.addressBook.getMultiaddrsForPeer(peerId)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| peerId | [`PeerId`][peer-id] | peerId to get | + +#### Returns + +| Type | Description | +|------|-------------| +| `Array` | Array of peer's multiaddr | + +#### Example + +```js +peerStore.addressBook.getMultiaddrsForPeer(peerId) +// undefined +peerStore.addressBook.set(peerId, multiaddr) +peerStore.addressBook.getMultiaddrsForPeer(peerId) +// [ +// /ip4/140.10.2.1/tcp/8000/p2p/QmW8rAgaaA6sRydK1k6vonShQME47aDxaFidbtMevWs73t +// /ip4/140.10.2.1/ws/8001/p2p/QmW8rAgaaA6sRydK1k6vonShQME47aDxaFidbtMevWs73t +// ] +``` + +### peerStore.addressBook.set + +Set known `multiaddrs` of a given peer. + +`peerStore.addressBook.set(peerId, multiaddrs)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| peerId | [`PeerId`][peer-id] | peerId to set | +| multiaddrs | |`Array` | [`Multiaddrs`][multiaddr] to store | + +#### Returns + +| Type | Description | +|------|-------------| +| `AddressBook` | Returns the Address Book component | + +#### Example + +```js +peerStore.addressBook.add(peerId, multiaddr) +``` + +### peerStore.protoBook.add + +Add known `protocols` of a given peer. 
+ +`peerStore.protoBook.add(peerId, protocols)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| peerId | [`PeerId`][peer-id] | peerId to set | +| protocols | `Array` | protocols to add | + +#### Returns + +| Type | Description | +|------|-------------| +| `ProtoBook` | Returns the Proto Book component | + +#### Example + +```js +peerStore.protoBook.add(peerId, protocols) +``` + +### peerStore.protoBook.delete + +Delete the provided peer from the book. + +`peerStore.protoBook.delete(peerId)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| peerId | [`PeerId`][peer-id] | peerId to remove | + +#### Returns + +| Type | Description | +|------|-------------| +| `boolean` | true if found and removed | + +#### Example + +```js +peerStore.protoBook.delete(peerId) +// false +peerStore.protoBook.set(peerId, protocols) +peerStore.protoBook.delete(peerId) +// true +``` + +### peerStore.protoBook.get + +Get the known `protocols` of a provided peer. + +`peerStore.protoBook.get(peerId)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| peerId | [`PeerId`][peer-id] | peerId to get | + +#### Returns + +| Type | Description | +|------|-------------| +| `Array` | Array of peer's supported protocols | + +#### Example + +```js +peerStore.protoBook.get(peerId) +// undefined +peerStore.protoBook.set(peerId, [ '/proto/1.0.0', '/proto/1.1.0' ]) +peerStore.protoBook.get(peerId) +// [ '/proto/1.0.0', '/proto/1.1.0' ] +``` + +### peerStore.protoBook.set + +Set known `protocols` of a given peer. + +`peerStore.protoBook.set(peerId, protocols)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| peerId | [`PeerId`][peer-id] | peerId to set | +| protocols | `Array` | protocols to store | + +#### Returns + +| Type | Description | +|------|-------------| +| `ProtoBook` | Returns the Proto Book component | + +#### Example + +```js +peerStore.protoBook.set(peerId, protocols) +``` + +### peerStore.delete + +Delete the provided peer from every book. + +`peerStore.delete(peerId)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| peerId | [`PeerId`][peer-id] | peerId to remove | + +#### Returns + +| Type | Description | +|------|-------------| +| `boolean` | true if found and removed | + +#### Example + +```js +peerStore.delete(peerId) +// false +peerStore.addressBook.set(peerId, multiaddrs) +peerStore.protoBook.set(peerId, protocols) +peerStore.delete(peerId) +// true +peerStore.delete(peerId2) +// false +peerStore.addressBook.set(peerId2, multiaddrs) +peerStore.delete(peerId2) +// true +``` + +### peerStore.get + +Get the stored information of a given peer. + +`peerStore.get(peerId)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| peerId | [`PeerId`][peer-id] | peerId to get | + +#### Returns + +| Type | Description | +|------|-------------| +| [`PeerInfo`][peer-info] | Peer information of the provided peer | + +TODO: change when `peer-info` is deprecated to new pointer + +#### Example + +```js +peerStore.get(peerId) +// false +peerStore.addressBook.set(peerId, multiaddrs) +peerStore.protoBook.set(peerId, protocols) +peerStore.get(peerId) +// { +// MultiaddrInfos: [...], +// protocols: [...] +// } +``` + +### peerStore.peers + +Get all the stored information of every peer. 
+ +`peerStore.peers` + +#### Returns + +| Type | Description | +|------|-------------| +| `Map` | Peer information of every peer | + +TODO: change when `peer-info` is deprecated to new pointer (breaking change) + +#### Example + +```js +for (let [peerIdString, peerInfo] of peerStore.peers.entries()) { + // peerInfo instance +} +``` + ### pubsub.getSubscribers Gets a list of the peer-ids that are subscribed to one topic. @@ -523,7 +877,7 @@ Gets a list of the peer-ids that are subscribed to one topic. | Type | Description | |------|-------------| -| `Array` | peer-id subscribed to the topic | +| `Array` | peer-id subscribed to the topic | #### Example @@ -541,7 +895,7 @@ Gets a list of topics the node is subscribed to. | Type | Description | |------|-------------| -| `Array` | topics the node is subscribed to | +| `Array` | topics the node is subscribed to | #### Example @@ -588,7 +942,7 @@ Subscribes the given handler to a pubsub topic. | Name | Type | Description | |------|------|-------------| | topic | `string` | topic to subscribe | -| handler | `function({ from: String, data: Buffer, seqno: Buffer, topicIDs: Array, signature: Buffer, key: Buffer })` | handler for new data on topic | +| handler | `function({ from: string, data: Buffer, seqno: Buffer, topicIDs: Array, signature: Buffer, key: Buffer })` | handler for new data on topic | #### Returns @@ -618,7 +972,7 @@ Unsubscribes the given handler from a pubsub topic. If no handler is provided, a | Name | Type | Description | |------|------|-------------| | topic | `string` | topic to unsubscribe | -| handler | `function()` | handler subscribed | +| handler | `function()` | handler subscribed | #### Returns @@ -787,9 +1141,9 @@ This event will be triggered anytime we are disconnected from another peer, rega - `dataReceived`: The stringified value of total incoming data for this stat. - `dataSent`: The stringified value of total outgoing data for this stat. - `movingAverages`: The properties are dependent on the configuration of the moving averages interval. Defaults are listed here. - - `['60000']`: The calculated moving average at a 1 minute interval. - - `['300000']`: The calculated moving average at a 5 minute interval. - - `['900000']`: The calculated moving average at a 15 minute interval. + - `['60000']`: The calculated moving average at a 1 minute interval. + - `['300000']`: The calculated moving average at a 5 minute interval. + - `['900000']`: The calculated moving average at a 15 minute interval. - `snapshot`: A getter that returns a clone of the raw stats. - `dataReceived`: A [`BigNumber`](https://github.com/MikeMcl/bignumber.js/) of the amount of incoming data - `dataSent`: A [`BigNumber`](https://github.com/MikeMcl/bignumber.js/) of the amount of outgoing data @@ -798,6 +1152,7 @@ This event will be triggered anytime we are disconnected from another peer, rega - `['300000']`: The [MovingAverage](https://www.npmjs.com/package/moving-averages) at a 5 minute interval. - `['900000']`: The [MovingAverage](https://www.npmjs.com/package/moving-averages) at a 15 minute interval. 
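As a quick, non-authoritative illustration of the stats shape described above, the sketch below only reads fields documented in this section (`snapshot`, `dataReceived`, `dataSent`). The `stats` variable is an assumption standing in for any of the Stats instances exposed by the metrics API when metrics are enabled.

```js
// Minimal sketch: `stats` is assumed to be one of the Stats instances described above.
// `snapshot` returns a clone of the raw stats as BigNumber totals.
const { dataReceived, dataSent } = stats.snapshot
console.log(`received ${dataReceived.toString()} bytes, sent ${dataSent.toString()} bytes`)
```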
+[multiaddr-info]: https://github.com/libp2p/js-libp2p/tree/master/src/peer-store/address-book.js [cid]: https://github.com/multiformats/js-cid [connection]: https://github.com/libp2p/js-interfaces/tree/master/src/connection [multiaddr]: https://github.com/multiformats/js-multiaddr diff --git a/src/dialer/index.js b/src/dialer/index.js index 7638ffe572..de9a394394 100644 --- a/src/dialer/index.js +++ b/src/dialer/index.js @@ -5,7 +5,6 @@ const errCode = require('err-code') const TimeoutController = require('timeout-abort-controller') const anySignal = require('any-signal') const PeerId = require('peer-id') -const PeerInfo = require('peer-info') const debug = require('debug') const log = debug('libp2p:dialer') log.error = debug('libp2p:dialer:error') @@ -62,13 +61,13 @@ class Dialer { * The dial to the first address that is successfully able to upgrade a connection * will be used. * - * @param {PeerInfo|Multiaddr} peer The peer to dial + * @param {PeerId|Multiaddr} peerId The peer to dial * @param {object} [options] * @param {AbortSignal} [options.signal] An AbortController signal * @returns {Promise} */ - async connectToPeer (peer, options = {}) { - const dialTarget = this._createDialTarget(peer) + async connectToPeer (peerId, options = {}) { + const dialTarget = this._createDialTarget(peerId) if (dialTarget.addrs.length === 0) { throw errCode(new Error('The dial request has no addresses'), codes.ERR_NO_VALID_ADDRESSES) } @@ -100,7 +99,7 @@ class Dialer { * Creates a DialTarget. The DialTarget is used to create and track * the DialRequest to a given peer. * @private - * @param {PeerInfo|Multiaddr} peer A PeerId or Multiaddr + * @param {PeerId|Multiaddr} peer A PeerId or Multiaddr * @returns {DialTarget} */ _createDialTarget (peer) { @@ -111,7 +110,10 @@ class Dialer { addrs: [dialable] } } - const addrs = this.peerStore.multiaddrsForPeer(dialable) + + dialable.multiaddrs && this.peerStore.addressBook.add(dialable.id, Array.from(dialable.multiaddrs)) + const addrs = this.peerStore.addressBook.getMultiaddrsForPeer(dialable.id) + return { id: dialable.id.toB58String(), addrs @@ -179,21 +181,27 @@ class Dialer { this.tokens.push(token) } + /** + * PeerInfo object + * @typedef {Object} peerInfo + * @property {Multiaddr} multiaddr peer multiaddr. + * @property {PeerId} id peer id. + */ + /** * Converts the given `peer` into a `PeerInfo` or `Multiaddr`. * @static - * @param {PeerInfo|PeerId|Multiaddr|string} peer - * @returns {PeerInfo|Multiaddr} + * @param {PeerId|Multiaddr|string} peer + * @returns {peerInfo|Multiaddr} */ static getDialable (peer) { - if (PeerInfo.isPeerInfo(peer)) return peer if (typeof peer === 'string') { peer = multiaddr(peer) } - let addr + let addrs if (multiaddr.isMultiaddr(peer)) { - addr = peer + addrs = new Set([peer]) // TODO: after peer-info removal, a Set should not be needed try { peer = PeerId.createFromCID(peer.getPeerId()) } catch (err) { @@ -202,10 +210,12 @@ class Dialer { } if (PeerId.isPeerId(peer)) { - peer = new PeerInfo(peer) + peer = { + id: peer, + multiaddrs: addrs + } } - addr && peer.multiaddrs.add(addr) return peer } } diff --git a/src/get-peer-info.js b/src/get-peer-info.js index 01a6bc49f2..5b0748ea8e 100644 --- a/src/get-peer-info.js +++ b/src/get-peer-info.js @@ -38,7 +38,12 @@ function getPeerInfo (peer, peerStore) { addr && peer.multiaddrs.add(addr) - return peerStore ? 
peerStore.put(peer) : peer + if (peerStore) { + peerStore.addressBook.add(peer.id, peer.multiaddrs.toArray()) + peerStore.protoBook.add(peer.id, Array.from(peer.protocols)) + } + + return peer } /** diff --git a/src/identify/index.js b/src/identify/index.js index d8643611b9..3fe06f4f5c 100644 --- a/src/identify/index.js +++ b/src/identify/index.js @@ -6,7 +6,6 @@ const lp = require('it-length-prefixed') const pipe = require('it-pipe') const { collect, take, consume } = require('streaming-iterables') -const PeerInfo = require('peer-info') const PeerId = require('peer-id') const multiaddr = require('multiaddr') const { toBuffer } = require('it-buffer') @@ -27,39 +26,6 @@ const errCode = require('err-code') const { codes } = require('../errors') class IdentifyService { - /** - * Replaces the multiaddrs on the given `peerInfo`, - * with the provided `multiaddrs` - * @param {PeerInfo} peerInfo - * @param {Array|Array} multiaddrs - */ - static updatePeerAddresses (peerInfo, multiaddrs) { - if (multiaddrs && multiaddrs.length > 0) { - peerInfo.multiaddrs.clear() - multiaddrs.forEach(ma => { - try { - peerInfo.multiaddrs.add(ma) - } catch (err) { - log.error('could not add multiaddr', err) - } - }) - } - } - - /** - * Replaces the protocols on the given `peerInfo`, - * with the provided `protocols` - * @static - * @param {PeerInfo} peerInfo - * @param {Array} protocols - */ - static updatePeerProtocols (peerInfo, protocols) { - if (protocols && protocols.length > 0) { - peerInfo.protocols.clear() - protocols.forEach(proto => peerInfo.protocols.add(proto)) - } - } - /** * Takes the `addr` and converts it to a Multiaddr if possible * @param {Buffer|String} addr @@ -181,7 +147,7 @@ class IdentifyService { } = message const id = await PeerId.createFromPubKey(publicKey) - const peerInfo = new PeerInfo(id) + if (connection.remotePeer.toB58String() !== id.toB58String()) { throw errCode(new Error('identified peer does not match the expected peer'), codes.ERR_INVALID_PEER) } @@ -189,11 +155,10 @@ class IdentifyService { // Get the observedAddr if there is one observedAddr = IdentifyService.getCleanMultiaddr(observedAddr) - // Copy the listenAddrs and protocols - IdentifyService.updatePeerAddresses(peerInfo, listenAddrs) - IdentifyService.updatePeerProtocols(peerInfo, protocols) + // Update peers data in PeerStore + this.registrar.peerStore.addressBook.set(id, listenAddrs.map((addr) => multiaddr(addr))) + this.registrar.peerStore.protoBook.set(id, protocols) - this.registrar.peerStore.replace(peerInfo) // TODO: Track our observed address so that we can score it log('received observed address of %s', observedAddr) } @@ -273,20 +238,16 @@ class IdentifyService { return log.error('received invalid message', err) } - // Update the listen addresses - const peerInfo = new PeerInfo(connection.remotePeer) - + // Update peers data in PeerStore + const id = connection.remotePeer try { - IdentifyService.updatePeerAddresses(peerInfo, message.listenAddrs) + this.registrar.peerStore.addressBook.set(id, message.listenAddrs.map((addr) => multiaddr(addr))) } catch (err) { return log.error('received invalid listen addrs', err) } // Update the protocols - IdentifyService.updatePeerProtocols(peerInfo, message.protocols) - - // Update the peer in the PeerStore - this.registrar.peerStore.replace(peerInfo) + this.registrar.peerStore.protoBook.set(id, message.protocols) } } diff --git a/src/index.js b/src/index.js index ef7168a7ae..1c4c587999 100644 --- a/src/index.js +++ b/src/index.js @@ -59,7 +59,7 @@ class Libp2p extends 
EventEmitter { localPeer: this.peerInfo.id, metrics: this.metrics, onConnection: (connection) => { - const peerInfo = this.peerStore.put(new PeerInfo(connection.remotePeer), { silent: true }) + const peerInfo = new PeerInfo(connection.remotePeer) this.registrar.onConnect(peerInfo, connection) this.connectionManager.onConnect(connection) this.emit('peer:connect', peerInfo) @@ -289,7 +289,11 @@ class Libp2p extends EventEmitter { const dialable = Dialer.getDialable(peer) let connection if (PeerInfo.isPeerInfo(dialable)) { - this.peerStore.put(dialable, { silent: true }) + // TODO Inconsistency from: getDialable adds a set, while regular peerInfo uses a Multiaddr set + // This should be handled on `peer-info` removal + const multiaddrs = dialable.multiaddrs.toArray ? dialable.multiaddrs.toArray() : Array.from(dialable.multiaddrs) + this.peerStore.addressBook.add(dialable.id, multiaddrs) + connection = this.registrar.getConnection(dialable) } @@ -328,7 +332,7 @@ class Libp2p extends EventEmitter { async ping (peer) { const peerInfo = await getPeerInfo(peer, this.peerStore) - return ping(this, peerInfo) + return ping(this, peerInfo.id) } /** @@ -430,7 +434,10 @@ class Libp2p extends EventEmitter { log.error(new Error(codes.ERR_DISCOVERED_SELF)) return } - this.peerStore.put(peerInfo) + + // TODO: once we deprecate peer-info, we should only set if we have data + this.peerStore.addressBook.add(peerInfo.id, peerInfo.multiaddrs.toArray()) + this.peerStore.protoBook.set(peerInfo.id, Array.from(peerInfo.protocols)) } /** diff --git a/src/peer-store/README.md b/src/peer-store/README.md index d9d79fe56a..bb309a0ddf 100644 --- a/src/peer-store/README.md +++ b/src/peer-store/README.md @@ -1,3 +1,89 @@ -# Peerstore +# PeerStore -WIP \ No newline at end of file +Libp2p's PeerStore is responsible for keeping an updated register with the relevant information of the known peers. It should be the single source of truth for all peer data, where a subsystem can learn about peers' data and where someone can listen for updates. The PeerStore comprises four main components: `addressBook`, `keyBook`, `protocolBook` and `metadataBook`. + +The PeerStore manages the high level operations on its inner books. Moreover, the PeerStore should be responsible for notifying interested parties of relevant events, through its Event Emitter. + +## Data gathering + +Several libp2p subsystems will perform operations, which will gather relevant information about peers. Some operations might not have this as an end goal, but can also gather important data. + +In a libp2p node's life, it will discover peers through its discovery protocols. In a typical discovery protocol, addresses of the peer are discovered along with its peer id. Once this happens, the PeerStore should collect this information for future (or immediate) usage by other subsystems. When the information is stored, the PeerStore should inform interested parties of the peer discovered (`peer` event). + +Taking into account a different scenario, a peer might perform/receive a dial request to/from a unkwown peer. In such a scenario, the PeerStore must store the peer's multiaddr once a connection is established. + +After a connection is established with a peer, the Identify protocol will run automatically. A stream is created and peers exchange their information (Multiaddrs, running protocols and their public key). Once this information is obtained, it should be added to the PeerStore. 
In this specific case, as we are speaking to the source of truth, we should ensure the PeerStore is prioritizing these records. If the recorded `multiaddrs` or `protocols` have changed, interested parties must be informed via the `change:multiaddrs` or `change:protocols` events respectively. + +In the background, the Identify Service is also waiting for protocol change notifications of peers via the IdentifyPush protocol. Peers may leverage the `identify-push` message to communicate protocol changes to all connected peers, so that their PeerStore can be updated with the updated protocols. As the `identify-push` also sends complete and updated information, the data in the PeerStore can be replaced. + +(To consider: Should we not replace until we get to multiaddr confidence? we might loose true information as we will talk with older nodes on the network.) + +While it is currently not supported in js-libp2p, future iterations may also support the [IdentifyDelta protocol](https://github.com/libp2p/specs/pull/176). + +It is also possible to gather relevant information for peers from other protocols / subsystems. For instance, in `DHT` operations, nodes can exchange peer data as part of the `DHT` operation. In this case, we can learn additional information about a peer we already know. In this scenario the PeerStore should not replace the existing data it has, just add it. + +## Data Consumption + +When the PeerStore data is updated, this information might be important for different parties. + +Every time a peer needs to dial another peer, it is essential that it knows the multiaddrs used by the peer, in order to perform a successful dial to it. The same is true for pinging a peer. While the `AddressBook` is going to keep its data updated, it will also emit `change:multiaddrs` events so that subsystems/users interested in knowing these changes can be notified instead of polling the `AddressBook`. + +Everytime a peer starts/stops supporting a protocol, libp2p subsystems or users might need to act accordingly. `js-libp2p` registrar orchestrates known peers, established connections and protocol topologies. This way, once a protocol is supported for a peer, the topology of that protocol should be informed that a new peer may be used and the subsystem can decide if it should open a new stream with that peer or not. For these situations, the `ProtoBook` will emit `change:protocols` events whenever supported protocols of a peer change. + +## PeerStore implementation + +The PeerStore wraps four main components: `addressBook`, `keyBook`, `protocolBook` and `metadataBook`. Moreover, it provides a high level API for those components, as well as data events. + +### Components + +#### Address Book + +The `addressBook` keeps the known multiaddrs of a peer. The multiaddrs of each peer may change over time and the Address Book must account for this. + +`Map` + +A `peerId.toString()` identifier mapping to a `multiaddrInfo` object, which should have the following structure: + +```js +{ + multiaddr: +} +``` + +#### Key Book + +The `keyBook` tracks the keys of the peers. + +**Not Yet Implemented** + +#### Protocol Book + +The `protoBook` holds the identifiers of the protocols supported by each peer. The protocols supported by each peer are dynamic and will change over time. + +`Map>` + +A `peerId.toString()` identifier mapping to a `Set` of protocol identifier strings. + +#### Metadata Book + +**Not Yet Implemented** + +### API + +For the complete API documentation, you should check the [API.md](../../doc/API.md). 
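
As a minimal sketch of the data consumption patterns described above, a subsystem can read what is currently known about a peer and listen for updates instead of polling the books. Here `peerStore` stands for an instance of this PeerStore (for example `libp2p.peerStore`) and `peerId` is assumed to be a known `PeerId`:

```js
// Read the currently known data for a peer (both return undefined if the peer is unknown)
const multiaddrInfos = peerStore.addressBook.get(peerId) // Array of { multiaddr }
const protocols = peerStore.protoBook.get(peerId) // Array of protocol strings

// React to changes instead of polling the books
peerStore.on('change:multiaddrs', ({ peerId, multiaddrs }) => {
  // e.g. re-evaluate dial targets for this peer
})
peerStore.on('change:protocols', ({ peerId, protocols }) => {
  // e.g. notify the protocol topologies
})
```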
+ +Access to its underlying books: + +- `peerStore.protoBook.*` +- `peerStore.addressBook.*` + +### Events + +- `peer` - emitted when a new peer is added. +- `change:multiaadrs` - emitted when a known peer has a different set of multiaddrs. +- `change:protocols` - emitted when a known peer supports a different set of protocols. + +## Future Considerations + +- If multiaddr TTLs are added, the PeerStore may schedule jobs to delete all addresses that exceed the TTL to prevent AddressBook bloating +- Further API methods will probably need to be added in the context of multiaddr validity and confidence. diff --git a/src/peer-store/address-book.js b/src/peer-store/address-book.js new file mode 100644 index 0000000000..445d731e1e --- /dev/null +++ b/src/peer-store/address-book.js @@ -0,0 +1,214 @@ +'use strict' + +const errcode = require('err-code') +const debug = require('debug') +const log = debug('libp2p:peer-store:address-book') +log.error = debug('libp2p:peer-store:address-book:error') + +const multiaddr = require('multiaddr') +const PeerId = require('peer-id') +const PeerInfo = require('peer-info') + +const Book = require('./book') + +const { + ERR_INVALID_PARAMETERS +} = require('../errors') + +/** + * The AddressBook is responsible for keeping the known multiaddrs + * of a peer. + */ +class AddressBook extends Book { + /** + * MultiaddrInfo object + * @typedef {Object} MultiaddrInfo + * @property {Multiaddr} multiaddr peer multiaddr. + */ + + /** + * @constructor + * @param {EventEmitter} peerStore + */ + constructor (peerStore) { + /** + * PeerStore Event emitter, used by the AddressBook to emit: + * "peer" - emitted when a peer is discovered by the node. + * "change:multiaddrs" - emitted when the known multiaddrs of a peer change. + */ + super(peerStore, 'change:multiaddrs', 'multiaddrs') + + /** + * Map known peers to their known multiaddrs. + * @type {Map>} + */ + this.data = new Map() + } + + /** + * Set known addresses of a provided peer. + * @override + * @param {PeerId} peerId + * @param {Array} addresses + * @returns {AddressBook} + */ + set (peerId, addresses) { + if (!PeerId.isPeerId(peerId)) { + log.error('peerId must be an instance of peer-id to store data') + throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS) + } + + const multiaddrInfos = this._toMultiaddrInfos(addresses) + const id = peerId.toB58String() + const rec = this.data.get(id) + + // Not replace multiaddrs + if (!multiaddrInfos.length) { + return this + } + + // Already knows the peer + if (rec && rec.length === multiaddrInfos.length) { + const intersection = rec.filter((mi) => multiaddrInfos.some((newMi) => mi.multiaddr.equals(newMi.multiaddr))) + + // Are new addresses equal to the old ones? + // If yes, no changes needed! + if (intersection.length === rec.length) { + log(`the addresses provided to store are equal to the already stored for ${id}`) + return this + } + } + + this.data.set(id, multiaddrInfos) + log(`stored provided multiaddrs for ${id}`) + + // TODO: Remove peerInfo and its usage on peer-info deprecate + const peerInfo = new PeerInfo(peerId) + multiaddrInfos.forEach((mi) => peerInfo.multiaddrs.add(mi.multiaddr)) + + // Notify the existance of a new peer + if (!rec) { + // this._ps.emit('peer', peerId) + this._ps.emit('peer', peerInfo) + } + + this._ps.emit('change:multiaddrs', { + peerId, + peerInfo, + multiaddrs: multiaddrInfos.map((mi) => mi.multiaddr) + }) + + return this + } + + /** + * Add known addresses of a provided peer. 
+ * If the peer is not known, it is set with the given addresses. + * @override + * @param {PeerId} peerId + * @param {Array} addresses + * @returns {AddressBook} + */ + add (peerId, addresses) { + if (!PeerId.isPeerId(peerId)) { + log.error('peerId must be an instance of peer-id to store data') + throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS) + } + + const multiaddrInfos = this._toMultiaddrInfos(addresses) + const id = peerId.toB58String() + const rec = this.data.get(id) + + // Add recorded uniquely to the new array (Union) + rec && rec.forEach((mi) => { + if (!multiaddrInfos.find(r => r.multiaddr.equals(mi.multiaddr))) { + multiaddrInfos.push(mi) + } + }) + + // If the recorded length is equal to the new after the unique union + // The content is the same, no need to update. + if (rec && rec.length === multiaddrInfos.length) { + log(`the addresses provided to store are already stored for ${id}`) + return this + } + + this.data.set(id, multiaddrInfos) + + log(`added provided multiaddrs for ${id}`) + + // TODO: Remove peerInfo and its usage on peer-info deprecate + const peerInfo = new PeerInfo(peerId) + multiaddrInfos.forEach((mi) => peerInfo.multiaddrs.add(mi.multiaddr)) + + this._ps.emit('change:multiaddrs', { + peerId, + peerInfo, + multiaddrs: multiaddrInfos.map((mi) => mi.multiaddr) + }) + + // Notify the existance of a new peer + if (!rec) { + // this._ps.emit('peer', peerId) + this._ps.emit('peer', peerInfo) + } + + return this + } + + /** + * Transforms received multiaddrs into MultiaddrInfo. + * @param {Array} addresses + * @returns {Array} + */ + _toMultiaddrInfos (addresses) { + if (!addresses) { + log.error('addresses must be provided to store data') + throw errcode(new Error('addresses must be provided'), ERR_INVALID_PARAMETERS) + } + + // create MultiaddrInfo for each address + const multiaddrInfos = [] + addresses.forEach((addr) => { + if (!multiaddr.isMultiaddr(addr)) { + log.error(`multiaddr ${addr} must be an instance of multiaddr`) + throw errcode(new Error(`multiaddr ${addr} must be an instance of multiaddr`), ERR_INVALID_PARAMETERS) + } + + multiaddrInfos.push({ + multiaddr: addr + }) + }) + + return multiaddrInfos + } + + /** + * Get the known multiaddrs for a given peer. All returned multiaddrs + * will include the encapsulated `PeerId` of the peer. + * @param {PeerId} peerId + * @returns {Array} + */ + getMultiaddrsForPeer (peerId) { + if (!PeerId.isPeerId(peerId)) { + throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS) + } + + const record = this.data.get(peerId.toB58String()) + + if (!record) { + return undefined + } + + return record.map((multiaddrInfo) => { + const addr = multiaddrInfo.multiaddr + + const idString = addr.getPeerId() + if (idString && idString === peerId.toB58String()) return addr + + return addr.encapsulate(`/p2p/${peerId.toB58String()}`) + }) + } +} + +module.exports = AddressBook diff --git a/src/peer-store/book.js b/src/peer-store/book.js new file mode 100644 index 0000000000..32456b7c13 --- /dev/null +++ b/src/peer-store/book.js @@ -0,0 +1,87 @@ +'use strict' + +const errcode = require('err-code') +const PeerId = require('peer-id') +const PeerInfo = require('peer-info') + +const { + ERR_INVALID_PARAMETERS +} = require('../errors') + +/** + * The Book is the skeleton for the PeerStore books. 
+ */ +class Book { + constructor (peerStore, eventName, eventProperty) { + this._ps = peerStore + this.eventName = eventName + this.eventProperty = eventProperty + + /** + * Map known peers to their data. + * @type {Map} + */ + this.data = new Map() + } + + /** + * Set known data of a provided peer. + * @param {PeerId} peerId + * @param {Array|Data} data + */ + set (peerId, data) { + throw errcode(new Error('set must be implemented by the subclass'), 'ERR_NOT_IMPLEMENTED') + } + + /** + * Add known data of a provided peer. + * @param {PeerId} peerId + * @param {Array|Data} data + */ + add (peerId, data) { + throw errcode(new Error('set must be implemented by the subclass'), 'ERR_NOT_IMPLEMENTED') + } + + /** + * Get the known data of a provided peer. + * @param {PeerId} peerId + * @returns {Array} + */ + get (peerId) { + if (!PeerId.isPeerId(peerId)) { + throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS) + } + + const rec = this.data.get(peerId.toB58String()) + + return rec ? [...rec] : undefined + } + + /** + * Deletes the provided peer from the book. + * @param {PeerId} peerId + * @returns {boolean} + */ + delete (peerId) { + if (!PeerId.isPeerId(peerId)) { + throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS) + } + + if (!this.data.delete(peerId.toB58String())) { + return false + } + + // TODO: Remove peerInfo and its usage on peer-info deprecate + const peerInfo = new PeerInfo(peerId) + + this._ps.emit(this.eventName, { + peerId, + peerInfo, + [this.eventProperty]: [] + }) + + return true + } +} + +module.exports = Book diff --git a/src/peer-store/index.js b/src/peer-store/index.js index 494d613d13..597b69e178 100644 --- a/src/peer-store/index.js +++ b/src/peer-store/index.js @@ -9,244 +9,212 @@ const { EventEmitter } = require('events') const PeerId = require('peer-id') const PeerInfo = require('peer-info') + +const AddressBook = require('./address-book') +const ProtoBook = require('./proto-book') + const { ERR_INVALID_PARAMETERS } = require('../errors') /** - * Responsible for managing known peers, as well as their addresses and metadata - * @fires PeerStore#peer Emitted when a peer is connected to this node - * @fires PeerStore#change:protocols - * @fires PeerStore#change:multiaddrs + * Responsible for managing known peers, as well as their addresses, protocols and metadata. + * @fires PeerStore#peer Emitted when a new peer is added. + * @fires PeerStore#change:protocols Emitted when a known peer supports a different set of protocols. + * @fires PeerStore#change:multiaddrs Emitted when a known peer has a different set of multiaddrs. */ class PeerStore extends EventEmitter { + /** + * PeerInfo object + * @typedef {Object} peerInfo + * @property {Array} multiaddrsInfos peer's information of the multiaddrs. + * @property {Array} protocols peer's supported protocols. + */ + constructor () { super() /** - * Map of peers - * - * @type {Map} + * AddressBook containing a map of peerIdStr to multiaddrsInfo */ - this.peers = new Map() + this.addressBook = new AddressBook(this) - // TODO: Track ourselves. We should split `peerInfo` up into its pieces so we get better - // control and observability. This will be the initial step for removing PeerInfo - // https://github.com/libp2p/go-libp2p-core/blob/master/peerstore/peerstore.go - // this.addressBook = new Map() - // this.protoBook = new Map() + /** + * ProtoBook containing a map of peerIdStr to supported protocols. 
+ */ + this.protoBook = new ProtoBook(this) } + // TODO: Temporary adapter for modules using PeerStore + // This should be removed under a breaking change /** - * Stores the peerInfo of a new peer. - * If already exist, its info is updated. If `silent` is set to - * true, no 'peer' event will be emitted. This can be useful if you - * are already in the process of dialing the peer. The peer is technically - * known, but may not have been added to the PeerStore yet. + * Stores the peerInfo of a new peer on each book. * @param {PeerInfo} peerInfo * @param {object} [options] - * @param {boolean} [options.silent] (Default=false) + * @param {boolean} [options.replace = true] * @return {PeerInfo} */ - put (peerInfo, options = { silent: false }) { - if (!PeerInfo.isPeerInfo(peerInfo)) { - throw errcode(new Error('peerInfo must be an instance of peer-info'), ERR_INVALID_PARAMETERS) - } + put (peerInfo, options) { + const multiaddrs = peerInfo.multiaddrs.toArray() + const protocols = Array.from(peerInfo.protocols || new Set()) - let peer - // Already know the peer? - if (this.has(peerInfo.id)) { - peer = this.update(peerInfo) - } else { - peer = this.add(peerInfo) + this.addressBook.set(peerInfo.id, multiaddrs, options) + this.protoBook.set(peerInfo.id, protocols, options) - // Emit the peer if silent = false - !options.silent && this.emit('peer', peerInfo) - } - return peer - } + const peer = this.find(peerInfo.id) + const pInfo = new PeerInfo(peerInfo.id) - /** - * Add a new peer to the store. - * @param {PeerInfo} peerInfo - * @return {PeerInfo} - */ - add (peerInfo) { - if (!PeerInfo.isPeerInfo(peerInfo)) { - throw errcode(new Error('peerInfo must be an instance of peer-info'), ERR_INVALID_PARAMETERS) + if (!peer) { + return pInfo } - // Create new instance and add values to it - const newPeerInfo = new PeerInfo(peerInfo.id) - - peerInfo.multiaddrs.forEach((ma) => newPeerInfo.multiaddrs.add(ma)) - peerInfo.protocols.forEach((p) => newPeerInfo.protocols.add(p)) - - const connectedMa = peerInfo.isConnected() - connectedMa && newPeerInfo.connect(connectedMa) - - const peerProxy = new Proxy(newPeerInfo, { - set: (obj, prop, value) => { - if (prop === 'multiaddrs') { - this.emit('change:multiaddrs', { - peerInfo: obj, - multiaddrs: value.toArray() - }) - } else if (prop === 'protocols') { - this.emit('change:protocols', { - peerInfo: obj, - protocols: Array.from(value) - }) - } - return Reflect.set(...arguments) - } - }) + peer.protocols.forEach((p) => pInfo.protocols.add(p)) + peer.multiaddrInfos.forEach((mi) => pInfo.multiaddrs.add(mi.multiaddr)) - this.peers.set(peerInfo.id.toB58String(), peerProxy) - return peerProxy + return pInfo } + // TODO: Temporary adapter for modules using PeerStore + // This should be removed under a breaking change /** - * Updates an already known peer. 
- * @param {PeerInfo} peerInfo - * @return {PeerInfo} - */ - update (peerInfo) { - if (!PeerInfo.isPeerInfo(peerInfo)) { - throw errcode(new Error('peerInfo must be an instance of peer-info'), ERR_INVALID_PARAMETERS) - } - - const id = peerInfo.id.toB58String() - const recorded = this.peers.get(id) - - // pass active connection state - const ma = peerInfo.isConnected() - if (ma) { - recorded.connect(ma) - } - - // Verify new multiaddrs - // TODO: better track added and removed multiaddrs - const multiaddrsIntersection = [ - ...recorded.multiaddrs.toArray() - ].filter((m) => peerInfo.multiaddrs.has(m)) - - if (multiaddrsIntersection.length !== peerInfo.multiaddrs.size || - multiaddrsIntersection.length !== recorded.multiaddrs.size) { - for (const ma of peerInfo.multiaddrs.toArray()) { - recorded.multiaddrs.add(ma) - } - - this.emit('change:multiaddrs', { - peerInfo: recorded, - multiaddrs: recorded.multiaddrs.toArray() - }) - } - - // Update protocols - // TODO: better track added and removed protocols - const protocolsIntersection = new Set( - [...recorded.protocols].filter((p) => peerInfo.protocols.has(p)) - ) - - if (protocolsIntersection.size !== peerInfo.protocols.size || - protocolsIntersection.size !== recorded.protocols.size) { - for (const protocol of peerInfo.protocols) { - recorded.protocols.add(protocol) - } - - this.emit('change:protocols', { - peerInfo: recorded, - protocols: Array.from(recorded.protocols) - }) - } - - // Add the public key if missing - if (!recorded.id.pubKey && peerInfo.id.pubKey) { - recorded.id.pubKey = peerInfo.id.pubKey - } - - return recorded - } - - /** - * Get the info to the given id. - * @param {PeerId|string} peerId b58str id + * Get the info of the given id. + * @param {peerId} peerId * @returns {PeerInfo} */ get (peerId) { - // TODO: deprecate this and just accept `PeerId` instances - if (PeerId.isPeerId(peerId)) { - peerId = peerId.toB58String() - } + const peer = this.find(peerId) - return this.peers.get(peerId) + const pInfo = new PeerInfo(peerId) + peer.protocols.forEach((p) => pInfo.protocols.add(p)) + peer.multiaddrInfos.forEach((mi) => pInfo.multiaddrs.add(mi.multiaddr)) + + return pInfo } + // TODO: Temporary adapter for modules using PeerStore + // This should be removed under a breaking change /** * Has the info to the given id. - * @param {PeerId|string} peerId b58str id + * @param {PeerId} peerId * @returns {boolean} */ has (peerId) { - // TODO: deprecate this and just accept `PeerId` instances - if (PeerId.isPeerId(peerId)) { - peerId = peerId.toB58String() - } - - return this.peers.has(peerId) + return Boolean(this.find(peerId)) } + // TODO: Temporary adapter for modules using PeerStore + // This should be removed under a breaking change /** - * Removes the Peer with the matching `peerId` from the PeerStore - * @param {PeerId|string} peerId b58str id + * Removes the peer provided. 
+ * @param {PeerId} peerId * @returns {boolean} true if found and removed */ remove (peerId) { - // TODO: deprecate this and just accept `PeerId` instances - if (PeerId.isPeerId(peerId)) { - peerId = peerId.toB58String() - } - - return this.peers.delete(peerId) + return this.delete(peerId) } + // TODO: Temporary adapter for modules using PeerStore + // This should be removed under a breaking change /** * Completely replaces the existing peers metadata with the given `peerInfo` * @param {PeerInfo} peerInfo * @returns {void} */ replace (peerInfo) { - if (!PeerInfo.isPeerInfo(peerInfo)) { - throw errcode(new Error('peerInfo must be an instance of peer-info'), ERR_INVALID_PARAMETERS) - } - - this.remove(peerInfo.id.toB58String()) - this.add(peerInfo) - - // This should be cleaned up in PeerStore v2 - this.emit('change:multiaddrs', { - peerInfo, - multiaddrs: peerInfo.multiaddrs.toArray() - }) - this.emit('change:protocols', { - peerInfo, - protocols: Array.from(peerInfo.protocols) - }) + this.put(peerInfo) } + // TODO: Temporary adapter for modules using PeerStore + // This should be removed under a breaking change /** * Returns the known multiaddrs for a given `PeerInfo`. All returned multiaddrs * will include the encapsulated `PeerId` of the peer. - * @param {PeerInfo} peer + * @param {PeerInfo} peerInfo * @returns {Array} */ - multiaddrsForPeer (peer) { - return this.put(peer, true).multiaddrs.toArray().map(addr => { - const idString = addr.getPeerId() - if (idString && idString === peer.id.toB58String()) return addr - return addr.encapsulate(`/p2p/${peer.id.toB58String()}`) - }) + multiaddrsForPeer (peerInfo) { + return this.addressBook.getMultiaddrsForPeer(peerInfo.id) + } + + /** + * Get all the stored information of every peer. + * @returns {Map} + */ + get peers () { + const peerInfos = new Map() + + // AddressBook + for (const [idStr, multiaddrInfos] of this.addressBook.data.entries()) { + // TODO: Remove peerInfo and its usage on peer-info deprecate + const peerInfo = new PeerInfo(PeerId.createFromCID(idStr)) + + multiaddrInfos.forEach((mi) => peerInfo.multiaddrs.add((mi.multiaddr))) + + const protocols = this.protoBook.data.get(idStr) || [] + protocols.forEach((p) => peerInfo.protocols.add(p)) + + peerInfos.set(idStr, peerInfo) + // TODO + // peerInfos.set(idStr, { + // id: PeerId.createFromCID(idStr), + // multiaddrInfos, + // protocols: this.protoBook.data.get(idStr) || [] + // }) + } + + // ProtoBook + for (const [idStr, protocols] of this.protoBook.data.entries()) { + // TODO: Remove peerInfo and its usage on peer-info deprecate + const peerInfo = peerInfos.get(idStr) + + if (!peerInfo) { + const peerInfo = new PeerInfo(PeerId.createFromCID(idStr)) + + protocols.forEach((p) => peerInfo.protocols.add(p)) + peerInfos.set(idStr, peerInfo) + // peerInfos.set(idStr, { + // id: PeerId.createFromCID(idStr), + // multiaddrInfos: [], + // protocols: protocols + // }) + } + } + + return peerInfos + } + + /** + * Delete the information of the given peer in every book. + * @param {PeerId} peerId + * @returns {boolean} true if found and removed + */ + delete (peerId) { + const addressesDeleted = this.addressBook.delete(peerId) + const protocolsDeleted = this.protoBook.delete(peerId) + return addressesDeleted || protocolsDeleted + } + + /** + * Find the stored information of a given peer. 
+ * @param {PeerId} peerId + * @returns {peerInfo} + */ + find (peerId) { + if (!PeerId.isPeerId(peerId)) { + throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS) + } + + const multiaddrInfos = this.addressBook.get(peerId) + const protocols = this.protoBook.get(peerId) + + if (!multiaddrInfos && !protocols) { + return undefined + } + + return { + multiaddrInfos: multiaddrInfos || [], + protocols: protocols || [] + } } } diff --git a/src/peer-store/proto-book.js b/src/peer-store/proto-book.js new file mode 100644 index 0000000000..a5c5867ee8 --- /dev/null +++ b/src/peer-store/proto-book.js @@ -0,0 +1,137 @@ +'use strict' + +const errcode = require('err-code') +const debug = require('debug') +const log = debug('libp2p:peer-store:proto-book') +log.error = debug('libp2p:peer-store:proto-book:error') + +const PeerId = require('peer-id') +const PeerInfo = require('peer-info') + +const Book = require('./book') + +const { + ERR_INVALID_PARAMETERS +} = require('../errors') + +/** + * The ProtoBook is responsible for keeping the known supported + * protocols of a peer. + * @fires ProtoBook#change:protocols + */ +class ProtoBook extends Book { + /** + * @constructor + * @param {EventEmitter} peerStore + */ + constructor (peerStore) { + /** + * PeerStore Event emitter, used by the ProtoBook to emit: + * "change:protocols" - emitted when the known protocols of a peer change. + */ + super(peerStore, 'change:protocols', 'protocols') + + /** + * Map known peers to their known protocols. + * @type {Map>} + */ + this.data = new Map() + } + + /** + * Set known protocols of a provided peer. + * If the peer was not known before, it will be added. + * @override + * @param {PeerId} peerId + * @param {Array} protocols + * @returns {ProtoBook} + */ + set (peerId, protocols) { + if (!PeerId.isPeerId(peerId)) { + log.error('peerId must be an instance of peer-id to store data') + throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS) + } + + if (!protocols) { + log.error('protocols must be provided to store data') + throw errcode(new Error('protocols must be provided'), ERR_INVALID_PARAMETERS) + } + + const id = peerId.toB58String() + const recSet = this.data.get(id) + const newSet = new Set(protocols) + + const isSetEqual = (a, b) => a.size === b.size && [...a].every(value => b.has(value)) + + // Already knows the peer and the recorded protocols are the same? + // If yes, no changes needed! + if (recSet && isSetEqual(recSet, newSet)) { + log(`the protocols provided to store are equal to the already stored for ${id}`) + return this + } + + this.data.set(id, newSet) + log(`stored provided protocols for ${id}`) + + // TODO: Remove peerInfo and its usage on peer-info deprecate + const peerInfo = new PeerInfo(peerId) + protocols.forEach((p) => peerInfo.protocols.add(p)) + + this._ps.emit('change:protocols', { + peerId, + peerInfo, + protocols + }) + + return this + } + + /** + * Adds known protocols of a provided peer. + * If the peer was not known before, it will be added. 
+ * @override + * @param {PeerId} peerId + * @param {Array} protocols + * @returns {ProtoBook} + */ + add (peerId, protocols) { + if (!PeerId.isPeerId(peerId)) { + log.error('peerId must be an instance of peer-id to store data') + throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS) + } + + if (!protocols) { + log.error('protocols must be provided to store data') + throw errcode(new Error('protocols must be provided'), ERR_INVALID_PARAMETERS) + } + + const id = peerId.toB58String() + const recSet = this.data.get(id) || new Set() + const newSet = new Set([...recSet, ...protocols]) // Set Union + + // Any new protocol added? + if (recSet.size === newSet.size) { + log(`the protocols provided to store are already stored for ${id}`) + return this + } + + protocols = [...newSet] + + this.data.set(id, newSet) + log(`added provided protocols for ${id}`) + + // TODO: Remove peerInfo and its usage on peer-info deprecate + const peerInfo = new PeerInfo(peerId) + protocols.forEach((p) => peerInfo.protocols.add(p)) + + this._ps.emit('change:protocols', { + peerId, + peerInfo, + protocols + }) + + return this + } +} + +module.exports = ProtoBook diff --git a/src/ping/index.js b/src/ping/index.js index d679a5c102..8590323056 100644 --- a/src/ping/index.js +++ b/src/ping/index.js @@ -15,11 +15,11 @@ const { PROTOCOL, PING_LENGTH } = require('./constants') /** * Ping a given peer and wait for its response, getting the operation latency. * @param {Libp2p} node - * @param {PeerInfo} peer + * @param {PeerId} peer * @returns {Promise} */ async function ping (node, peer) { - log('dialing %s to %s', PROTOCOL, peer.id.toB58String()) + log('dialing %s to %s', PROTOCOL, peer.toB58String()) const { stream } = await node.dialProtocol(peer, PROTOCOL) diff --git a/src/registrar.js b/src/registrar.js index cc24548516..2aa6bcf85e 100644 --- a/src/registrar.js +++ b/src/registrar.js @@ -10,7 +10,6 @@ const { } = require('./errors') const Topology = require('libp2p-interfaces/src/topology') const { Connection } = require('libp2p-interfaces/src/connection') -const PeerInfo = require('peer-info') /** * Responsible for notifying registered protocols of events in the network. @@ -22,6 +21,7 @@ class Registrar { * @constructor */ constructor ({ peerStore }) { + // Used on topology to listen for protocol changes this.peerStore = peerStore /** @@ -74,9 +74,11 @@ class Registrar { * @returns {void} */ onConnect (peerInfo, conn) { - if (!PeerInfo.isPeerInfo(peerInfo)) { - throw errcode(new Error('peerInfo must be an instance of peer-info'), ERR_INVALID_PARAMETERS) - } + // TODO: This is not a `peer-info` instance anymore, but an object with the data. + // This can be modified to `peer-id` though, once `peer-info` is deprecated. + // if (!PeerInfo.isPeerInfo(peerInfo)) { + // throw errcode(new Error('peerInfo must be an instance of peer-info'), ERR_INVALID_PARAMETERS) + // } if (!Connection.isConnection(conn)) { throw errcode(new Error('conn must be an instance of interface-connection'), ERR_INVALID_PARAMETERS) @@ -101,9 +103,11 @@ class Registrar { * @returns {void} */ onDisconnect (peerInfo, connection, error) { - if (!PeerInfo.isPeerInfo(peerInfo)) { - throw errcode(new Error('peerInfo must be an instance of peer-info'), ERR_INVALID_PARAMETERS) - } + // TODO: This is not a `peer-info` instance anymore, but an object with the data. + // This can be modified to `peer-id` though, once `peer-info` is deprecated. 
+ // if (!PeerInfo.isPeerInfo(peerInfo)) { + // throw errcode(new Error('peerInfo must be an instance of peer-info'), ERR_INVALID_PARAMETERS) + // } const id = peerInfo.id.toB58String() let storedConn = this.connections.get(id) @@ -126,9 +130,11 @@ class Registrar { * @returns {Connection} */ getConnection (peerInfo) { - if (!PeerInfo.isPeerInfo(peerInfo)) { - throw errcode(new Error('peerInfo must be an instance of peer-info'), ERR_INVALID_PARAMETERS) - } + // TODO: This is not a `peer-info` instance anymore, but an object with the data. + // This can be modified to `peer-id` though, once `peer-info` is deprecated. + // if (!PeerInfo.isPeerInfo(peerInfo)) { + // throw errcode(new Error('peerInfo must be an instance of peer-info'), ERR_INVALID_PARAMETERS) + // } const connections = this.connections.get(peerInfo.id.toB58String()) // Return the first, open connection diff --git a/src/upgrader.js b/src/upgrader.js index 25cb6d3f11..ad2391971f 100644 --- a/src/upgrader.js +++ b/src/upgrader.js @@ -317,7 +317,7 @@ class Upgrader { * Attempts to encrypt the incoming `connection` with the provided `cryptos`. * @private * @async - * @param {PeerId} localPeer The initiators PeerInfo + * @param {PeerId} localPeer The initiators PeerId * @param {*} connection * @param {Map} cryptos * @returns {CryptoResult} An encrypted connection, remote peer `PeerId` and the protocol of the `Crypto` used @@ -346,7 +346,7 @@ class Upgrader { * The first `Crypto` module to succeed will be used * @private * @async - * @param {PeerId} localPeer The initiators PeerInfo + * @param {PeerId} localPeer The initiators PeerId * @param {*} connection * @param {PeerId} remotePeerId * @param {Map} cryptos diff --git a/test/content-routing/dht/operation.node.js b/test/content-routing/dht/operation.node.js index 8520b28548..538d8b326f 100644 --- a/test/content-routing/dht/operation.node.js +++ b/test/content-routing/dht/operation.node.js @@ -43,7 +43,8 @@ describe('DHT subsystem operates correctly', () => { remoteLibp2p.start() ]) - remAddr = libp2p.peerStore.multiaddrsForPeer(remotePeerInfo)[0] + libp2p.peerStore.addressBook.set(remotePeerInfo.id, [remoteListenAddr]) + remAddr = libp2p.peerStore.addressBook.getMultiaddrsForPeer(remotePeerInfo.id)[0] }) afterEach(() => Promise.all([ @@ -67,7 +68,6 @@ describe('DHT subsystem operates correctly', () => { const value = Buffer.from('world') await libp2p.dialProtocol(remAddr, subsystemMulticodecs) - await Promise.all([ pWaitFor(() => libp2p._dht.routingTable.size === 1), pWaitFor(() => remoteLibp2p._dht.routingTable.size === 1) @@ -98,7 +98,8 @@ describe('DHT subsystem operates correctly', () => { await libp2p.start() await remoteLibp2p.start() - remAddr = libp2p.peerStore.multiaddrsForPeer(remotePeerInfo)[0] + libp2p.peerStore.addressBook.set(remotePeerInfo.id, [remoteListenAddr]) + remAddr = libp2p.peerStore.addressBook.getMultiaddrsForPeer(remotePeerInfo.id)[0] }) afterEach(() => Promise.all([ diff --git a/test/dialing/direct.node.js b/test/dialing/direct.node.js index c846e8ad68..734687d0a2 100644 --- a/test/dialing/direct.node.js +++ b/test/dialing/direct.node.js @@ -99,7 +99,10 @@ describe('Dialing (direct, TCP)', () => { const dialer = new Dialer({ transportManager: localTM, peerStore: { - multiaddrsForPeer: () => [remoteAddr] + addressBook: { + add: () => {}, + getMultiaddrsForPeer: () => [remoteAddr] + } } }) const peerId = await PeerId.createFromJSON(Peers[0]) @@ -120,7 +123,7 @@ describe('Dialing (direct, TCP)', () => { const peerId = await PeerId.createFromJSON(Peers[0]) 
const peerInfo = new PeerInfo(peerId) peerInfo.multiaddrs.add(remoteAddr) - peerStore.put(peerInfo) + peerStore.addressBook.set(peerInfo.id, peerInfo.multiaddrs.toArray()) const connection = await dialer.connectToPeer(peerInfo) expect(connection).to.exist() @@ -131,7 +134,10 @@ describe('Dialing (direct, TCP)', () => { const dialer = new Dialer({ transportManager: localTM, peerStore: { - multiaddrsForPeer: () => [unsupportedAddr] + addressBook: { + add: () => {}, + getMultiaddrsForPeer: () => [unsupportedAddr] + } } }) const peerId = await PeerId.createFromJSON(Peers[0]) @@ -172,7 +178,10 @@ describe('Dialing (direct, TCP)', () => { transportManager: localTM, concurrency: 2, peerStore: { - multiaddrsForPeer: () => addrs + addressBook: { + add: () => {}, + getMultiaddrsForPeer: () => addrs + } } }) diff --git a/test/dialing/direct.spec.js b/test/dialing/direct.spec.js index f6c716cbd1..3d4041120b 100644 --- a/test/dialing/direct.spec.js +++ b/test/dialing/direct.spec.js @@ -87,7 +87,10 @@ describe('Dialing (direct, WebSockets)', () => { const dialer = new Dialer({ transportManager: localTM, peerStore: { - multiaddrsForPeer: () => [remoteAddr] + addressBook: { + add: () => {}, + getMultiaddrsForPeer: () => [remoteAddr] + } } }) @@ -100,7 +103,10 @@ describe('Dialing (direct, WebSockets)', () => { const dialer = new Dialer({ transportManager: localTM, peerStore: { - multiaddrsForPeer: () => [remoteAddr] + addressBook: { + add: () => {}, + getMultiaddrsForPeer: () => [remoteAddr] + } } }) @@ -121,7 +127,10 @@ describe('Dialing (direct, WebSockets)', () => { const dialer = new Dialer({ transportManager: localTM, peerStore: { - multiaddrsForPeer: () => [remoteAddr] + addressBook: { + add: () => {}, + getMultiaddrsForPeer: () => [remoteAddr] + } } }) const peerId = await PeerId.createFromJSON(Peers[0]) @@ -135,7 +144,10 @@ describe('Dialing (direct, WebSockets)', () => { const dialer = new Dialer({ transportManager: localTM, peerStore: { - multiaddrsForPeer: () => [unsupportedAddr] + addressBook: { + set: () => {}, + getMultiaddrsForPeer: () => [unsupportedAddr] + } } }) const peerId = await PeerId.createFromJSON(Peers[0]) @@ -150,7 +162,10 @@ describe('Dialing (direct, WebSockets)', () => { transportManager: localTM, timeout: 50, peerStore: { - multiaddrsForPeer: () => [remoteAddr] + addressBook: { + add: () => {}, + getMultiaddrsForPeer: () => [remoteAddr] + } } }) sinon.stub(localTM, 'dial').callsFake(async (addr, options) => { @@ -172,7 +187,10 @@ describe('Dialing (direct, WebSockets)', () => { transportManager: localTM, concurrency: 2, peerStore: { - multiaddrsForPeer: () => [remoteAddr, remoteAddr, remoteAddr] + addressBook: { + set: () => {}, + getMultiaddrsForPeer: () => [remoteAddr, remoteAddr, remoteAddr] + } } }) @@ -208,7 +226,10 @@ describe('Dialing (direct, WebSockets)', () => { transportManager: localTM, concurrency: 2, peerStore: { - multiaddrsForPeer: () => [remoteAddr, remoteAddr, remoteAddr] + addressBook: { + set: () => {}, + getMultiaddrsForPeer: () => [remoteAddr, remoteAddr, remoteAddr] + } } }) @@ -316,7 +337,7 @@ describe('Dialing (direct, WebSockets)', () => { }) sinon.spy(libp2p.dialer, 'connectToPeer') - sinon.spy(libp2p.peerStore, 'put') + sinon.spy(libp2p.peerStore.addressBook, 'add') const connection = await libp2p.dial(remoteAddr) expect(connection).to.exist() @@ -325,7 +346,7 @@ describe('Dialing (direct, WebSockets)', () => { expect(protocol).to.equal('/echo/1.0.0') await connection.close() expect(libp2p.dialer.connectToPeer.callCount).to.equal(1) - 
expect(libp2p.peerStore.put.callCount).to.be.at.least(1) + expect(libp2p.peerStore.addressBook.add.callCount).to.be.at.least(1) }) it('should run identify automatically after connecting', async () => { @@ -339,19 +360,22 @@ describe('Dialing (direct, WebSockets)', () => { }) sinon.spy(libp2p.identifyService, 'identify') - sinon.spy(libp2p.peerStore, 'replace') sinon.spy(libp2p.upgrader, 'onConnection') const connection = await libp2p.dial(remoteAddr) expect(connection).to.exist() + sinon.spy(libp2p.peerStore.addressBook, 'set') + sinon.spy(libp2p.peerStore.protoBook, 'set') + // Wait for onConnection to be called await pWaitFor(() => libp2p.upgrader.onConnection.callCount === 1) expect(libp2p.identifyService.identify.callCount).to.equal(1) await libp2p.identifyService.identify.firstCall.returnValue - expect(libp2p.peerStore.replace.callCount).to.equal(1) + expect(libp2p.peerStore.addressBook.set.callCount).to.equal(1) + expect(libp2p.peerStore.protoBook.set.callCount).to.equal(1) }) it('should be able to use hangup to close connections', async () => { diff --git a/test/dialing/relay.node.js b/test/dialing/relay.node.js index c7e4919dc6..5fd0d8fca1 100644 --- a/test/dialing/relay.node.js +++ b/test/dialing/relay.node.js @@ -11,6 +11,8 @@ const multiaddr = require('multiaddr') const { collect } = require('streaming-iterables') const pipe = require('it-pipe') const AggregateError = require('aggregate-error') +const PeerId = require('peer-id') + const { createPeerInfo } = require('../utils/creators/peer') const baseOptions = require('../utils/base-options') const Libp2p = require('../../src') @@ -51,8 +53,9 @@ describe('Dialing (via relay, TCP)', () => { return Promise.all([srcLibp2p, relayLibp2p, dstLibp2p].map(async libp2p => { await libp2p.stop() // Clear the peer stores - for (const peerId of libp2p.peerStore.peers.keys()) { - libp2p.peerStore.remove(peerId) + for (const peerIdStr of libp2p.peerStore.peers.keys()) { + const peerId = PeerId.createFromCID(peerIdStr) + libp2p.peerStore.delete(peerId) } })) }) diff --git a/test/identify/index.spec.js b/test/identify/index.spec.js index ca32e023f2..7af2f67c84 100644 --- a/test/identify/index.spec.js +++ b/test/identify/index.spec.js @@ -48,7 +48,12 @@ describe('Identify', () => { protocols, registrar: { peerStore: { - replace: () => {} + addressBook: { + set: () => { } + }, + protoBook: { + set: () => { } + } } } }) @@ -64,7 +69,8 @@ describe('Identify', () => { const [local, remote] = duplexPair() sinon.stub(localConnectionMock, 'newStream').returns({ stream: local, protocol: multicodecs.IDENTIFY }) - sinon.spy(localIdentify.registrar.peerStore, 'replace') + sinon.spy(localIdentify.registrar.peerStore.addressBook, 'set') + sinon.spy(localIdentify.registrar.peerStore.protoBook, 'set') // Run identify await Promise.all([ @@ -76,9 +82,10 @@ describe('Identify', () => { }) ]) - expect(localIdentify.registrar.peerStore.replace.callCount).to.equal(1) + expect(localIdentify.registrar.peerStore.addressBook.set.callCount).to.equal(1) + expect(localIdentify.registrar.peerStore.protoBook.set.callCount).to.equal(1) // Validate the remote peer gets updated in the peer store - const call = localIdentify.registrar.peerStore.replace.firstCall + const call = localIdentify.registrar.peerStore.addressBook.set.firstCall expect(call.args[0].id.bytes).to.equal(remotePeer.id.bytes) }) @@ -88,7 +95,12 @@ describe('Identify', () => { protocols, registrar: { peerStore: { - replace: () => {} + addressBook: { + set: () => { } + }, + protoBook: { + set: () => { } + } } } 
}) @@ -134,7 +146,12 @@ describe('Identify', () => { peerInfo: remotePeer, registrar: { peerStore: { - replace: () => {} + addressBook: { + set: () => {} + }, + protoBook: { + set: () => { } + } } } }) @@ -152,9 +169,8 @@ describe('Identify', () => { const [local, remote] = duplexPair() sinon.stub(localConnectionMock, 'newStream').returns({ stream: local, protocol: multicodecs.IDENTIFY_PUSH }) - sinon.spy(IdentifyService, 'updatePeerAddresses') - sinon.spy(IdentifyService, 'updatePeerProtocols') - sinon.spy(remoteIdentify.registrar.peerStore, 'replace') + sinon.spy(remoteIdentify.registrar.peerStore.addressBook, 'set') + sinon.spy(remoteIdentify.registrar.peerStore.protoBook, 'set') // Run identify await Promise.all([ @@ -166,14 +182,14 @@ describe('Identify', () => { }) ]) - expect(IdentifyService.updatePeerAddresses.callCount).to.equal(1) - expect(IdentifyService.updatePeerProtocols.callCount).to.equal(1) - - expect(remoteIdentify.registrar.peerStore.replace.callCount).to.equal(1) - const [peerInfo] = remoteIdentify.registrar.peerStore.replace.firstCall.args - expect(peerInfo.id.bytes).to.eql(localPeer.id.bytes) - expect(peerInfo.multiaddrs.toArray()).to.eql([listeningAddr]) - expect(peerInfo.protocols).to.eql(localProtocols) + expect(remoteIdentify.registrar.peerStore.addressBook.set.callCount).to.equal(1) + expect(remoteIdentify.registrar.peerStore.protoBook.set.callCount).to.equal(1) + const [peerId, multiaddrs] = remoteIdentify.registrar.peerStore.addressBook.set.firstCall.args + expect(peerId.bytes).to.eql(localPeer.id.bytes) + expect(multiaddrs).to.eql([listeningAddr]) + const [peerId2, protocols] = remoteIdentify.registrar.peerStore.protoBook.set.firstCall.args + expect(peerId2.bytes).to.eql(localPeer.id.bytes) + expect(protocols).to.eql(Array.from(localProtocols)) }) }) @@ -204,13 +220,15 @@ describe('Identify', () => { }) sinon.spy(libp2p.identifyService, 'identify') - const peerStoreSpy = sinon.spy(libp2p.peerStore, 'replace') + const peerStoreSpySet = sinon.spy(libp2p.peerStore.addressBook, 'set') + const peerStoreSpyAdd = sinon.spy(libp2p.peerStore.addressBook, 'add') const connection = await libp2p.dialer.connectToPeer(remoteAddr) expect(connection).to.exist() // Wait for peer store to be updated - await pWaitFor(() => peerStoreSpy.callCount === 1) + // Dialer._createDialTarget (add), Identify (replace) + await pWaitFor(() => peerStoreSpySet.callCount === 1 && peerStoreSpyAdd.callCount === 1) expect(libp2p.identifyService.identify.callCount).to.equal(1) // The connection should have no open streams @@ -226,7 +244,6 @@ describe('Identify', () => { sinon.spy(libp2p.identifyService, 'identify') sinon.spy(libp2p.identifyService, 'push') - sinon.spy(libp2p.peerStore, 'update') const connection = await libp2p.dialer.connectToPeer(remoteAddr) expect(connection).to.exist() diff --git a/test/peer-discovery/index.spec.js b/test/peer-discovery/index.spec.js index 8873cd76cc..518cd68f71 100644 --- a/test/peer-discovery/index.spec.js +++ b/test/peer-discovery/index.spec.js @@ -36,7 +36,9 @@ describe('peer discovery', () => { ...baseOptions, peerInfo }) - libp2p.peerStore.add(remotePeerInfo) + libp2p.peerStore.addressBook.set(remotePeerInfo.id, remotePeerInfo.multiaddrs.toArray()) + libp2p.peerStore.protoBook.set(remotePeerInfo.id, Array.from(remotePeerInfo.protocols)) + const deferred = defer() sinon.stub(libp2p.dialer, 'connectToPeer').callsFake((remotePeerInfo) => { expect(remotePeerInfo).to.equal(remotePeerInfo) @@ -47,7 +49,9 @@ describe('peer discovery', () => { libp2p.start() await 
deferred.promise - expect(spy.getCall(0).args).to.eql([remotePeerInfo]) + + expect(spy.calledOnce).to.eql(true) + expect(spy.getCall(0).args[0].id.toString()).to.eql(remotePeerInfo.id.toString()) }) it('should ignore self on discovery', async () => { diff --git a/test/peer-store/address-book.spec.js b/test/peer-store/address-book.spec.js new file mode 100644 index 0000000000..1ec11eb742 --- /dev/null +++ b/test/peer-store/address-book.spec.js @@ -0,0 +1,365 @@ +'use strict' +/* eslint-env mocha */ + +const chai = require('chai') +chai.use(require('dirty-chai')) +const { expect } = chai + +const { EventEmitter } = require('events') +const pDefer = require('p-defer') +const multiaddr = require('multiaddr') + +const AddressBook = require('../../src/peer-store/address-book') + +const peerUtils = require('../utils/creators/peer') +const { + ERR_INVALID_PARAMETERS +} = require('../../src/errors') + +const addr1 = multiaddr('/ip4/127.0.0.1/tcp/8000') +const addr2 = multiaddr('/ip4/127.0.0.1/tcp/8001') +const addr3 = multiaddr('/ip4/127.0.0.1/tcp/8002') + +const arraysAreEqual = (a, b) => a.length === b.length && a.sort().every((item, index) => b[index] === item) + +describe('addressBook', () => { + let peerId + + before(async () => { + [peerId] = await peerUtils.createPeerId() + }) + + describe('addressBook.set', () => { + let ee, ab + + beforeEach(() => { + ee = new EventEmitter() + ab = new AddressBook(ee) + }) + + afterEach(() => { + ee.removeAllListeners() + }) + + it('throwns invalid parameters error if invalid PeerId is provided', () => { + expect(() => { + ab.set('invalid peerId') + }).to.throw(ERR_INVALID_PARAMETERS) + }) + + it('throwns invalid parameters error if no addresses provided', () => { + expect(() => { + ab.set(peerId) + }).to.throw(ERR_INVALID_PARAMETERS) + }) + + it('throwns invalid parameters error if invalid multiaddrs are provided', () => { + expect(() => { + ab.set(peerId, 'invalid multiaddr') + }).to.throw(ERR_INVALID_PARAMETERS) + }) + + it('replaces the stored content by default and emit change event', () => { + const defer = pDefer() + const supportedMultiaddrs = [addr1, addr2] + + ee.once('change:multiaddrs', ({ peerId, multiaddrs }) => { + expect(peerId).to.exist() + expect(multiaddrs).to.eql(supportedMultiaddrs) + defer.resolve() + }) + + ab.set(peerId, supportedMultiaddrs) + const multiaddrInfos = ab.get(peerId) + const multiaddrs = multiaddrInfos.map((mi) => mi.multiaddr) + expect(multiaddrs).to.have.deep.members(supportedMultiaddrs) + + return defer.promise + }) + + it('emits on set if not storing the exact same content', async () => { + const defer = pDefer() + + const supportedMultiaddrsA = [addr1, addr2] + const supportedMultiaddrsB = [addr2] + + let changeCounter = 0 + ee.on('change:multiaddrs', () => { + changeCounter++ + if (changeCounter > 1) { + defer.resolve() + } + }) + + // set 1 + ab.set(peerId, supportedMultiaddrsA) + + // set 2 (same content) + ab.set(peerId, supportedMultiaddrsB) + const multiaddrInfos = ab.get(peerId) + const multiaddrs = multiaddrInfos.map((mi) => mi.multiaddr) + expect(multiaddrs).to.have.deep.members(supportedMultiaddrsB) + + await defer.promise + }) + + it('does not emit on set if it is storing the exact same content', async () => { + const defer = pDefer() + + const supportedMultiaddrs = [addr1, addr2] + + let changeCounter = 0 + ee.on('change:multiaddrs', () => { + changeCounter++ + if (changeCounter > 1) { + defer.reject() + } + }) + + // set 1 + ab.set(peerId, supportedMultiaddrs) + + // set 2 (same content) + 
ab.set(peerId, supportedMultiaddrs) + + // Wait 50ms for incorrect second event + setTimeout(() => { + defer.resolve() + }, 50) + + await defer.promise + }) + }) + + describe('addressBook.add', () => { + let ee, ab + + beforeEach(() => { + ee = new EventEmitter() + ab = new AddressBook(ee) + }) + + afterEach(() => { + ee.removeAllListeners() + }) + + it('throwns invalid parameters error if invalid PeerId is provided', () => { + expect(() => { + ab.add('invalid peerId') + }).to.throw(ERR_INVALID_PARAMETERS) + }) + + it('throwns invalid parameters error if no addresses provided', () => { + expect(() => { + ab.add(peerId) + }).to.throw(ERR_INVALID_PARAMETERS) + }) + + it('throwns invalid parameters error if invalid multiaddrs are provided', () => { + expect(() => { + ab.add(peerId, 'invalid multiaddr') + }).to.throw(ERR_INVALID_PARAMETERS) + }) + + it('adds the new content and emits change event', () => { + const defer = pDefer() + + const supportedMultiaddrsA = [addr1, addr2] + const supportedMultiaddrsB = [addr3] + const finalMultiaddrs = supportedMultiaddrsA.concat(supportedMultiaddrsB) + + let changeTrigger = 2 + ee.on('change:multiaddrs', ({ multiaddrs }) => { + changeTrigger-- + if (changeTrigger === 0 && arraysAreEqual(multiaddrs, finalMultiaddrs)) { + defer.resolve() + } + }) + + // Replace + ab.set(peerId, supportedMultiaddrsA) + let multiaddrInfos = ab.get(peerId) + let multiaddrs = multiaddrInfos.map((mi) => mi.multiaddr) + expect(multiaddrs).to.have.deep.members(supportedMultiaddrsA) + + // Add + ab.add(peerId, supportedMultiaddrsB) + multiaddrInfos = ab.get(peerId) + multiaddrs = multiaddrInfos.map((mi) => mi.multiaddr) + expect(multiaddrs).to.have.deep.members(finalMultiaddrs) + + return defer.promise + }) + + it('emits on add if the content to add not exists', async () => { + const defer = pDefer() + + const supportedMultiaddrsA = [addr1] + const supportedMultiaddrsB = [addr2] + const finalMultiaddrs = supportedMultiaddrsA.concat(supportedMultiaddrsB) + + let changeCounter = 0 + ee.on('change:multiaddrs', () => { + changeCounter++ + if (changeCounter > 1) { + defer.resolve() + } + }) + + // set 1 + ab.set(peerId, supportedMultiaddrsA) + + // set 2 (content already existing) + ab.add(peerId, supportedMultiaddrsB) + const multiaddrInfos = ab.get(peerId) + const multiaddrs = multiaddrInfos.map((mi) => mi.multiaddr) + expect(multiaddrs).to.have.deep.members(finalMultiaddrs) + + await defer.promise + }) + + it('does not emit on add if the content to add already exists', async () => { + const defer = pDefer() + + const supportedMultiaddrsA = [addr1, addr2] + const supportedMultiaddrsB = [addr2] + + let changeCounter = 0 + ee.on('change:multiaddrs', () => { + changeCounter++ + if (changeCounter > 1) { + defer.reject() + } + }) + + // set 1 + ab.set(peerId, supportedMultiaddrsA) + + // set 2 (content already existing) + ab.add(peerId, supportedMultiaddrsB) + + // Wait 50ms for incorrect second event + setTimeout(() => { + defer.resolve() + }, 50) + + await defer.promise + }) + }) + + describe('addressBook.get', () => { + let ee, ab + + beforeEach(() => { + ee = new EventEmitter() + ab = new AddressBook(ee) + }) + + it('throwns invalid parameters error if invalid PeerId is provided', () => { + expect(() => { + ab.get('invalid peerId') + }).to.throw(ERR_INVALID_PARAMETERS) + }) + + it('returns undefined if no multiaddrs are known for the provided peer', () => { + const multiaddrInfos = ab.get(peerId) + + expect(multiaddrInfos).to.not.exist() + }) + + it('returns the multiaddrs stored', 
() => { + const supportedMultiaddrs = [addr1, addr2] + + ab.set(peerId, supportedMultiaddrs) + + const multiaddrInfos = ab.get(peerId) + const multiaddrs = multiaddrInfos.map((mi) => mi.multiaddr) + expect(multiaddrs).to.have.deep.members(supportedMultiaddrs) + }) + }) + + describe('addressBook.getMultiaddrsForPeer', () => { + let ee, ab + + beforeEach(() => { + ee = new EventEmitter() + ab = new AddressBook(ee) + }) + + it('throwns invalid parameters error if invalid PeerId is provided', () => { + expect(() => { + ab.getMultiaddrsForPeer('invalid peerId') + }).to.throw(ERR_INVALID_PARAMETERS) + }) + + it('returns undefined if no multiaddrs are known for the provided peer', () => { + const multiaddrInfos = ab.getMultiaddrsForPeer(peerId) + + expect(multiaddrInfos).to.not.exist() + }) + + it('returns the multiaddrs stored', () => { + const supportedMultiaddrs = [addr1, addr2] + + ab.set(peerId, supportedMultiaddrs) + + const multiaddrs = ab.getMultiaddrsForPeer(peerId) + multiaddrs.forEach((m) => { + expect(m.getPeerId()).to.equal(peerId.toB58String()) + }) + }) + }) + + describe('addressBook.delete', () => { + let ee, ab + + beforeEach(() => { + ee = new EventEmitter() + ab = new AddressBook(ee) + }) + + it('throwns invalid parameters error if invalid PeerId is provided', () => { + expect(() => { + ab.delete('invalid peerId') + }).to.throw(ERR_INVALID_PARAMETERS) + }) + + it('returns false if no records exist for the peer and no event is emitted', () => { + const defer = pDefer() + + ee.on('change:multiaddrs', () => { + defer.reject() + }) + + const deleted = ab.delete(peerId) + + expect(deleted).to.equal(false) + + // Wait 50ms for incorrect invalid event + setTimeout(() => { + defer.resolve() + }, 50) + + return defer.promise + }) + + it('returns true if the record exists and an event is emitted', () => { + const defer = pDefer() + + const supportedMultiaddrs = [addr1, addr2] + ab.set(peerId, supportedMultiaddrs) + + // Listen after set + ee.on('change:multiaddrs', ({ multiaddrs }) => { + expect(multiaddrs.length).to.eql(0) + defer.resolve() + }) + + const deleted = ab.delete(peerId) + + expect(deleted).to.equal(true) + + return defer.promise + }) + }) +}) diff --git a/test/peer-store/peer-store.spec.js b/test/peer-store/peer-store.spec.js index 91628432cc..411977f86d 100644 --- a/test/peer-store/peer-store.spec.js +++ b/test/peer-store/peer-store.spec.js @@ -4,185 +4,147 @@ const chai = require('chai') chai.use(require('dirty-chai')) const { expect } = chai -const sinon = require('sinon') - -const pDefer = require('p-defer') const PeerStore = require('../../src/peer-store') const multiaddr = require('multiaddr') + const peerUtils = require('../utils/creators/peer') -const addr = multiaddr('/ip4/127.0.0.1/tcp/8000') +const addr1 = multiaddr('/ip4/127.0.0.1/tcp/8000') +const addr2 = multiaddr('/ip4/127.0.0.1/tcp/8001') +const addr3 = multiaddr('/ip4/127.0.0.1/tcp/8002') +const addr4 = multiaddr('/ip4/127.0.0.1/tcp/8003') -describe('peer-store', () => { - let peerStore +const proto1 = '/protocol1' +const proto2 = '/protocol2' +const proto3 = '/protocol3' - beforeEach(() => { - peerStore = new PeerStore() +describe('peer-store', () => { + let peerIds + before(async () => { + peerIds = await peerUtils.createPeerId({ + number: 4 + }) }) - it('should add a new peer and emit it when it does not exist', async () => { - const defer = pDefer() + describe('empty books', () => { + let peerStore - sinon.spy(peerStore, 'put') - sinon.spy(peerStore, 'add') - sinon.spy(peerStore, 'update') - - const 
[peerInfo] = await peerUtils.createPeerInfo() + beforeEach(() => { + peerStore = new PeerStore() + }) - peerStore.on('peer', (peer) => { - expect(peer).to.exist() - defer.resolve() + it('has an empty map of peers', () => { + const peers = peerStore.peers + expect(peers.size).to.equal(0) }) - peerStore.put(peerInfo) - // Wait for peerStore to emit the peer - await defer.promise + it('returns false on trying to delete a non existant peerId', () => { + const deleted = peerStore.delete(peerIds[0]) + expect(deleted).to.equal(false) + }) - expect(peerStore.put.callCount).to.equal(1) - expect(peerStore.add.callCount).to.equal(1) - expect(peerStore.update.callCount).to.equal(0) + it('returns undefined on trying to find a non existant peerId', () => { + const peerInfo = peerStore.find(peerIds[0]) + expect(peerInfo).to.not.exist() + }) }) - it('should update peer when it is already in the store', async () => { - const [peerInfo] = await peerUtils.createPeerInfo() + describe('previously populated books', () => { + let peerStore - // Put the peer in the store - peerStore.put(peerInfo) + beforeEach(() => { + peerStore = new PeerStore() - sinon.spy(peerStore, 'add') - sinon.spy(peerStore, 'update') + // Add peer0 with { addr1, addr2 } and { proto1 } + peerStore.addressBook.set(peerIds[0], [addr1, addr2]) + peerStore.protoBook.set(peerIds[0], [proto1]) - // When updating, peer event must not be emitted - peerStore.on('peer', () => { - throw new Error('should not emit twice') - }) - // If no multiaddrs change, the event should not be emitted - peerStore.on('change:multiaddrs', () => { - throw new Error('should not emit change:multiaddrs') - }) - // If no protocols change, the event should not be emitted - peerStore.on('change:protocols', () => { - throw new Error('should not emit change:protocols') - }) - - peerStore.put(peerInfo) + // Add peer1 with { addr3 } and { proto2, proto3 } + peerStore.addressBook.set(peerIds[1], [addr3]) + peerStore.protoBook.set(peerIds[1], [proto2, proto3]) - expect(peerStore.add.callCount).to.equal(0) - expect(peerStore.update.callCount).to.equal(1) - }) + // Add peer2 with { addr4 } + peerStore.addressBook.set(peerIds[2], [addr4]) - it('should emit the "change:multiaddrs" event when a peer has new multiaddrs', async () => { - const defer = pDefer() - const [createdPeerInfo] = await peerUtils.createPeerInfo() + // Add peer3 with { addr4 } and { proto2 } + peerStore.addressBook.set(peerIds[3], [addr4]) + peerStore.protoBook.set(peerIds[3], [proto2]) + }) - // Put the peer in the store - peerStore.put(createdPeerInfo) + it('has peers', () => { + const peers = peerStore.peers - // When updating, "change:multiaddrs" event must not be emitted - peerStore.on('change:multiaddrs', ({ peerInfo, multiaddrs }) => { - expect(peerInfo).to.exist() - expect(peerInfo.id).to.eql(createdPeerInfo.id) - expect(peerInfo.protocols).to.eql(createdPeerInfo.protocols) - expect(multiaddrs).to.exist() - expect(multiaddrs).to.eql(createdPeerInfo.multiaddrs.toArray()) - defer.resolve() - }) - // If no protocols change, the event should not be emitted - peerStore.on('change:protocols', () => { - throw new Error('should not emit change:protocols') + expect(peers.size).to.equal(4) + expect(Array.from(peers.keys())).to.have.members([ + peerIds[0].toB58String(), + peerIds[1].toB58String(), + peerIds[2].toB58String(), + peerIds[3].toB58String() + ]) }) - createdPeerInfo.multiaddrs.add(addr) - peerStore.put(createdPeerInfo) + it('returns true on deleting a stored peer', () => { + const deleted = 
peerStore.delete(peerIds[0]) + expect(deleted).to.equal(true) - // Wait for peerStore to emit the event - await defer.promise - }) - - it('should emit the "change:protocols" event when a peer has new protocols', async () => { - const defer = pDefer() - const [createdPeerInfo] = await peerUtils.createPeerInfo() + const peers = peerStore.peers + expect(peers.size).to.equal(3) + expect(Array.from(peers.keys())).to.not.have.members([peerIds[0].toB58String()]) + }) - // Put the peer in the store - peerStore.put(createdPeerInfo) + it('returns true on deleting a stored peer which is only on one book', () => { + const deleted = peerStore.delete(peerIds[2]) + expect(deleted).to.equal(true) - // If no multiaddrs change, the event should not be emitted - peerStore.on('change:multiaddrs', () => { - throw new Error('should not emit change:multiaddrs') - }) - // When updating, "change:protocols" event must be emitted - peerStore.on('change:protocols', ({ peerInfo, protocols }) => { - expect(peerInfo).to.exist() - expect(peerInfo.id).to.eql(createdPeerInfo.id) - expect(peerInfo.multiaddrs).to.eql(createdPeerInfo.multiaddrs) - expect(protocols).to.exist() - expect(protocols).to.eql(Array.from(createdPeerInfo.protocols)) - defer.resolve() + const peers = peerStore.peers + expect(peers.size).to.equal(3) }) - createdPeerInfo.protocols.add('/new-protocol/1.0.0') - peerStore.put(createdPeerInfo) - - // Wait for peerStore to emit the event - await defer.promise - }) + it('finds the stored information of a peer in all its books', () => { + const peerInfo = peerStore.find(peerIds[0]) + expect(peerInfo).to.exist() + expect(peerInfo.protocols).to.have.members([proto1]) - it('should be able to retrieve a peer from store through its b58str id', async () => { - const [peerInfo] = await peerUtils.createPeerInfo() - const id = peerInfo.id + const peerMultiaddrs = peerInfo.multiaddrInfos.map((mi) => mi.multiaddr) + expect(peerMultiaddrs).to.have.members([addr1, addr2]) + }) - let retrievedPeer = peerStore.get(id) - expect(retrievedPeer).to.not.exist() + it('finds the stored information of a peer that is not present in all its books', () => { + const peerInfo = peerStore.find(peerIds[2]) + expect(peerInfo).to.exist() + expect(peerInfo.protocols.length).to.eql(0) - // Put the peer in the store - peerStore.put(peerInfo) + const peerMultiaddrs = peerInfo.multiaddrInfos.map((mi) => mi.multiaddr) + expect(peerMultiaddrs).to.have.members([addr4]) + }) - retrievedPeer = peerStore.get(id) - expect(retrievedPeer).to.exist() - expect(retrievedPeer.id).to.equal(peerInfo.id) - expect(retrievedPeer.multiaddrs).to.eql(peerInfo.multiaddrs) - expect(retrievedPeer.protocols).to.eql(peerInfo.protocols) - }) + it('can find all the peers supporting a protocol', () => { + const peerSupporting2 = [] - it('should be able to remove a peer from store through its b58str id', async () => { - const [peerInfo] = await peerUtils.createPeerInfo() - const id = peerInfo.id + for (const [, peerInfo] of peerStore.peers.entries()) { + if (peerInfo.protocols.has(proto2)) { + peerSupporting2.push(peerInfo) + } + } - let removed = peerStore.remove(id) - expect(removed).to.eql(false) + expect(peerSupporting2.length).to.eql(2) + expect(peerSupporting2[0].id.toB58String()).to.eql(peerIds[1].toB58String()) + expect(peerSupporting2[1].id.toB58String()).to.eql(peerIds[3].toB58String()) + }) - // Put the peer in the store - peerStore.put(peerInfo) - expect(peerStore.peers.size).to.equal(1) + it('can find all the peers listening on a given address', () => { + const 
peerListenint4 = [] - removed = peerStore.remove(id) - expect(removed).to.eql(true) - expect(peerStore.peers.size).to.equal(0) - }) + for (const [, peerInfo] of peerStore.peers.entries()) { + if (peerInfo.multiaddrs.has(addr4)) { + peerListenint4.push(peerInfo) + } + } - it('should be able to get the multiaddrs for a peer', async () => { - const [peerInfo, relayInfo] = await peerUtils.createPeerInfo({ number: 2 }) - const id = peerInfo.id - const ma1 = multiaddr('/ip4/127.0.0.1/tcp/4001') - const ma2 = multiaddr('/ip4/127.0.0.1/tcp/4002/ws') - const ma3 = multiaddr(`/ip4/127.0.0.1/tcp/4003/ws/p2p/${relayInfo.id.toB58String()}/p2p-circuit`) - - peerInfo.multiaddrs.add(ma1) - peerInfo.multiaddrs.add(ma2) - peerInfo.multiaddrs.add(ma3) - - const multiaddrs = peerStore.multiaddrsForPeer(peerInfo) - const expectedAddrs = [ - ma1.encapsulate(`/p2p/${id.toB58String()}`), - ma2.encapsulate(`/p2p/${id.toB58String()}`), - ma3.encapsulate(`/p2p/${id.toB58String()}`) - ] - - expect(multiaddrs).to.eql(expectedAddrs) + expect(peerListenint4.length).to.eql(2) + expect(peerListenint4[0].id.toB58String()).to.eql(peerIds[2].toB58String()) + expect(peerListenint4[1].id.toB58String()).to.eql(peerIds[3].toB58String()) + }) }) }) - -describe('peer-store on discovery', () => { - // TODO: implement with discovery -}) diff --git a/test/peer-store/proto-book.spec.js b/test/peer-store/proto-book.spec.js new file mode 100644 index 0000000000..7985dfe1dc --- /dev/null +++ b/test/peer-store/proto-book.spec.js @@ -0,0 +1,310 @@ +'use strict' +/* eslint-env mocha */ + +const chai = require('chai') +chai.use(require('dirty-chai')) +const { expect } = chai + +const { EventEmitter } = require('events') +const pDefer = require('p-defer') + +const ProtoBook = require('../../src/peer-store/proto-book') + +const peerUtils = require('../utils/creators/peer') +const { + ERR_INVALID_PARAMETERS +} = require('../../src/errors') + +const arraysAreEqual = (a, b) => a.length === b.length && a.sort().every((item, index) => b[index] === item) + +describe('protoBook', () => { + let peerId + + before(async () => { + [peerId] = await peerUtils.createPeerId() + }) + + describe('protoBook.set', () => { + let ee, pb + + beforeEach(() => { + ee = new EventEmitter() + pb = new ProtoBook(ee) + }) + + afterEach(() => { + ee.removeAllListeners() + }) + + it('throwns invalid parameters error if invalid PeerId is provided', () => { + expect(() => { + pb.set('invalid peerId') + }).to.throw(ERR_INVALID_PARAMETERS) + }) + + it('throwns invalid parameters error if no protocols provided', () => { + expect(() => { + pb.set(peerId) + }).to.throw(ERR_INVALID_PARAMETERS) + }) + + it('replaces the stored content by default and emit change event', () => { + const defer = pDefer() + const supportedProtocols = ['protocol1', 'protocol2'] + + ee.once('change:protocols', ({ peerId, protocols }) => { + expect(peerId).to.exist() + expect(protocols).to.have.deep.members(supportedProtocols) + defer.resolve() + }) + + pb.set(peerId, supportedProtocols) + const protocols = pb.get(peerId) + expect(protocols).to.have.deep.members(supportedProtocols) + + return defer.promise + }) + + it('emits on set if not storing the exact same content', () => { + const defer = pDefer() + + const supportedProtocolsA = ['protocol1', 'protocol2'] + const supportedProtocolsB = ['protocol2'] + + let changeCounter = 0 + ee.on('change:protocols', () => { + changeCounter++ + if (changeCounter > 1) { + defer.resolve() + } + }) + + // set 1 + pb.set(peerId, supportedProtocolsA) + + // set 2 (same 
content) + pb.set(peerId, supportedProtocolsB) + const protocols = pb.get(peerId) + expect(protocols).to.have.deep.members(supportedProtocolsB) + + return defer.promise + }) + + it('does not emit on set if it is storing the exact same content', () => { + const defer = pDefer() + + const supportedProtocols = ['protocol1', 'protocol2'] + + let changeCounter = 0 + ee.on('change:protocols', () => { + changeCounter++ + if (changeCounter > 1) { + defer.reject() + } + }) + + // set 1 + pb.set(peerId, supportedProtocols) + + // set 2 (same content) + pb.set(peerId, supportedProtocols) + + // Wait 50ms for incorrect second event + setTimeout(() => { + defer.resolve() + }, 50) + + return defer.promise + }) + }) + + describe('protoBook.add', () => { + let ee, pb + + beforeEach(() => { + ee = new EventEmitter() + pb = new ProtoBook(ee) + }) + + afterEach(() => { + ee.removeAllListeners() + }) + + it('throwns invalid parameters error if invalid PeerId is provided', () => { + expect(() => { + pb.add('invalid peerId') + }).to.throw(ERR_INVALID_PARAMETERS) + }) + + it('throwns invalid parameters error if no protocols provided', () => { + expect(() => { + pb.add(peerId) + }).to.throw(ERR_INVALID_PARAMETERS) + }) + + it('adds the new content and emits change event', () => { + const defer = pDefer() + + const supportedProtocolsA = ['protocol1', 'protocol2'] + const supportedProtocolsB = ['protocol3'] + const finalProtocols = supportedProtocolsA.concat(supportedProtocolsB) + + let changeTrigger = 2 + ee.on('change:protocols', ({ protocols }) => { + changeTrigger-- + if (changeTrigger === 0 && arraysAreEqual(protocols, finalProtocols)) { + defer.resolve() + } + }) + + // Replace + pb.set(peerId, supportedProtocolsA) + let protocols = pb.get(peerId) + expect(protocols).to.have.deep.members(supportedProtocolsA) + + // Add + pb.add(peerId, supportedProtocolsB) + protocols = pb.get(peerId) + expect(protocols).to.have.deep.members(finalProtocols) + + return defer.promise + }) + + it('emits on add if the content to add not exists', () => { + const defer = pDefer() + + const supportedProtocolsA = ['protocol1'] + const supportedProtocolsB = ['protocol2'] + const finalProtocols = supportedProtocolsA.concat(supportedProtocolsB) + + let changeCounter = 0 + ee.on('change:protocols', () => { + changeCounter++ + if (changeCounter > 1) { + defer.resolve() + } + }) + + // set 1 + pb.set(peerId, supportedProtocolsA) + + // set 2 (content already existing) + pb.add(peerId, supportedProtocolsB) + const protocols = pb.get(peerId) + expect(protocols).to.have.deep.members(finalProtocols) + + return defer.promise + }) + + it('does not emit on add if the content to add already exists', () => { + const defer = pDefer() + + const supportedProtocolsA = ['protocol1', 'protocol2'] + const supportedProtocolsB = ['protocol2'] + + let changeCounter = 0 + ee.on('change:protocols', () => { + changeCounter++ + if (changeCounter > 1) { + defer.reject() + } + }) + + // set 1 + pb.set(peerId, supportedProtocolsA) + + // set 2 (content already existing) + pb.add(peerId, supportedProtocolsB) + + // Wait 50ms for incorrect second event + setTimeout(() => { + defer.resolve() + }, 50) + + return defer.promise + }) + }) + + describe('protoBook.get', () => { + let ee, pb + + beforeEach(() => { + ee = new EventEmitter() + pb = new ProtoBook(ee) + }) + + it('throwns invalid parameters error if invalid PeerId is provided', () => { + expect(() => { + pb.get('invalid peerId') + }).to.throw(ERR_INVALID_PARAMETERS) + }) + + it('returns undefined if no protocols 
are known for the provided peer', () => { + const protocols = pb.get(peerId) + + expect(protocols).to.not.exist() + }) + + it('returns the protocols stored', () => { + const supportedProtocols = ['protocol1', 'protocol2'] + + pb.set(peerId, supportedProtocols) + + const protocols = pb.get(peerId) + expect(protocols).to.have.deep.members(supportedProtocols) + }) + }) + + describe('protoBook.delete', () => { + let ee, pb + + beforeEach(() => { + ee = new EventEmitter() + pb = new ProtoBook(ee) + }) + + it('throwns invalid parameters error if invalid PeerId is provided', () => { + expect(() => { + pb.delete('invalid peerId') + }).to.throw(ERR_INVALID_PARAMETERS) + }) + + it('returns false if no records exist for the peer and no event is emitted', () => { + const defer = pDefer() + + ee.on('change:protocols', () => { + defer.reject() + }) + + const deleted = pb.delete(peerId) + + expect(deleted).to.equal(false) + + // Wait 50ms for incorrect invalid event + setTimeout(() => { + defer.resolve() + }, 50) + + return defer.promise + }) + + it('returns true if the record exists and an event is emitted', () => { + const defer = pDefer() + + const supportedProtocols = ['protocol1', 'protocol2'] + pb.set(peerId, supportedProtocols) + + // Listen after set + ee.on('change:protocols', ({ protocols }) => { + expect(protocols.length).to.eql(0) + defer.resolve() + }) + + const deleted = pb.delete(peerId) + + expect(deleted).to.equal(true) + + return defer.promise + }) + }) +}) diff --git a/test/registrar/registrar.spec.js b/test/registrar/registrar.spec.js index 056c4b49ab..3e2744901e 100644 --- a/test/registrar/registrar.spec.js +++ b/test/registrar/registrar.spec.js @@ -89,7 +89,9 @@ describe('registrar', () => { remotePeerInfo.protocols.add(multicodec) // Add connected peer to peerStore and registrar - peerStore.put(remotePeerInfo) + peerStore.addressBook.set(remotePeerInfo.id, remotePeerInfo.multiaddrs.toArray()) + peerStore.protoBook.set(remotePeerInfo.id, Array.from(remotePeerInfo.protocols)) + registrar.onConnect(remotePeerInfo, conn) expect(registrar.connections.size).to.eql(1) @@ -156,18 +158,23 @@ describe('registrar', () => { const peerInfo = await PeerInfo.create(conn.remotePeer) // Add connected peer to peerStore and registrar - peerStore.put(peerInfo) + peerStore.addressBook.set(peerInfo.id, peerInfo.multiaddrs.toArray()) + peerStore.protoBook.set(peerInfo.id, Array.from(peerInfo.protocols)) + registrar.onConnect(peerInfo, conn) // Add protocol to peer and update it peerInfo.protocols.add(multicodec) - peerStore.put(peerInfo) + peerStore.addressBook.add(peerInfo.id, peerInfo.multiaddrs.toArray()) + peerStore.protoBook.add(peerInfo.id, Array.from(peerInfo.protocols)) await onConnectDefer.promise // Remove protocol to peer and update it peerInfo.protocols.delete(multicodec) - peerStore.replace(peerInfo) + + peerStore.addressBook.set(peerInfo.id, peerInfo.multiaddrs.toArray()) + peerStore.protoBook.set(peerInfo.id, Array.from(peerInfo.protocols)) await onDisconnectDefer.promise }) @@ -197,7 +204,8 @@ describe('registrar', () => { const id = peerInfo.id.toB58String() // Add connection to registrar - peerStore.put(peerInfo) + peerStore.addressBook.set(peerInfo.id, peerInfo.multiaddrs.toArray()) + peerStore.protoBook.set(peerInfo.id, Array.from(peerInfo.protocols)) registrar.onConnect(peerInfo, conn1) registrar.onConnect(peerInfo, conn2) From 54212cbf2601d8b8f341e693ff1c91f276d81544 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Thu, 16 Apr 2020 15:20:42 +0200 Subject: [PATCH 089/102] chore: 
deprecate old peer store api (#598) * chore: deprecate old peer-store api BREAKING CHANGE: the peer-store api changed. Check the API docs for the new specification. * chore: apply suggestions from code review Co-Authored-By: Jacob Heun * chore: apply suggestions from code review Co-Authored-By: Jacob Heun Co-authored-by: Jacob Heun --- doc/API.md | 28 +++++- package.json | 4 +- src/peer-store/address-book.js | 4 +- src/peer-store/book.js | 6 ++ src/peer-store/index.js | 102 ++------------------- src/peer-store/proto-book.js | 4 +- test/content-routing/dht/operation.node.js | 2 +- test/peer-store/address-book.spec.js | 53 ++++++----- test/peer-store/peer-store.spec.js | 10 +- test/peer-store/proto-book.spec.js | 47 +++++----- 10 files changed, 106 insertions(+), 154 deletions(-) diff --git a/doc/API.md b/doc/API.md index bfaa9f778b..b8d4c16c5d 100644 --- a/doc/API.md +++ b/doc/API.md @@ -41,6 +41,8 @@ * [`metrics.forPeer`](#metricsforpeer) * [`metrics.forProtocol`](#metricsforprotocol) * [Events](#events) + * [`libp2p`](#libp2p) + * [`libp2p.peerStore`](#libp2ppeerStore) * [Types](#types) * [`Stats`](#stats) @@ -1099,7 +1101,9 @@ console.log(peerStats.toJSON()) ## Events -Once you have a libp2p instance, you can listen to several events it emits, so that you can be notified of relevant network events. +Once you have a libp2p instance, you can listen to several events it emits, so that you can be notified of relevant network events. + +### libp2p #### An error has occurred @@ -1132,6 +1136,28 @@ This event will be triggered anytime we are disconnected from another peer, rega - `peer`: instance of [`PeerInfo`][peer-info] +### libp2p.peerStore + +#### A new peer is added to the peerStore + +`libp2p.peerStore.on('peer', (peerId) => {})` + +- `peerId`: instance of [`PeerId`][peer-id] + +#### Known multiaddrs for a peer change + +`libp2p.peerStore.on('change:multiaddrs', ({ peerId, multiaddrs}) => {})` + +- `peerId`: instance of [`PeerId`][peer-id] +- `multiaddrs`: array of known [`multiaddr`][multiaddr] for the peer + +#### Known protocols for a peer change + +`libp2p.peerStore.on('change:protocols', ({ peerId, protocols}) => {})` + +- `peerId`: instance of [`PeerId`][peer-id] +- `protocols`: array of known, supported protocols for the peer (string identifiers) + ## Types ### Stats diff --git a/package.json b/package.json index 0a72e4ab44..86a71bb0b3 100644 --- a/package.json +++ b/package.json @@ -84,7 +84,7 @@ "cids": "^0.8.0", "delay": "^4.3.0", "dirty-chai": "^2.0.1", - "interop-libp2p": "~0.0.1", + "interop-libp2p": "libp2p/interop#chore/update-libp2p-daemon-with-peerstore", "it-concat": "^1.0.0", "it-pair": "^1.0.0", "it-pushable": "^1.4.0", @@ -93,7 +93,7 @@ "libp2p-delegated-peer-routing": "^0.4.0", "libp2p-floodsub": "^0.20.0", "libp2p-gossipsub": "^0.2.0", - "libp2p-kad-dht": "^0.18.2", + "libp2p-kad-dht": "^0.19.0-pre.0", "libp2p-mdns": "^0.13.0", "libp2p-mplex": "^0.9.1", "libp2p-secio": "^0.12.1", diff --git a/src/peer-store/address-book.js b/src/peer-store/address-book.js index 445d731e1e..6ae4623041 100644 --- a/src/peer-store/address-book.js +++ b/src/peer-store/address-book.js @@ -28,7 +28,7 @@ class AddressBook extends Book { /** * @constructor - * @param {EventEmitter} peerStore + * @param {PeerStore} peerStore */ constructor (peerStore) { /** @@ -80,6 +80,7 @@ class AddressBook extends Book { } this.data.set(id, multiaddrInfos) + this._setPeerId(peerId) log(`stored provided multiaddrs for ${id}`) // TODO: Remove peerInfo and its usage on peer-info deprecate @@ -133,6 +134,7 
@@ class AddressBook extends Book { return this } + this._setPeerId(peerId) this.data.set(id, multiaddrInfos) log(`added provided multiaddrs for ${id}`) diff --git a/src/peer-store/book.js b/src/peer-store/book.js index 32456b7c13..c44b55d5c2 100644 --- a/src/peer-store/book.js +++ b/src/peer-store/book.js @@ -82,6 +82,12 @@ class Book { return true } + + _setPeerId (peerId) { + if (!this._ps.peerIds.get(peerId)) { + this._ps.peerIds.set(peerId.toB58String(), peerId) + } + } } module.exports = Book diff --git a/src/peer-store/index.js b/src/peer-store/index.js index 597b69e178..589a186dca 100644 --- a/src/peer-store/index.js +++ b/src/peer-store/index.js @@ -43,97 +43,13 @@ class PeerStore extends EventEmitter { * ProtoBook containing a map of peerIdStr to supported protocols. */ this.protoBook = new ProtoBook(this) - } - - // TODO: Temporary adapter for modules using PeerStore - // This should be removed under a breaking change - /** - * Stores the peerInfo of a new peer on each book. - * @param {PeerInfo} peerInfo - * @param {object} [options] - * @param {boolean} [options.replace = true] - * @return {PeerInfo} - */ - put (peerInfo, options) { - const multiaddrs = peerInfo.multiaddrs.toArray() - const protocols = Array.from(peerInfo.protocols || new Set()) - - this.addressBook.set(peerInfo.id, multiaddrs, options) - this.protoBook.set(peerInfo.id, protocols, options) - - const peer = this.find(peerInfo.id) - const pInfo = new PeerInfo(peerInfo.id) - - if (!peer) { - return pInfo - } - - peer.protocols.forEach((p) => pInfo.protocols.add(p)) - peer.multiaddrInfos.forEach((mi) => pInfo.multiaddrs.add(mi.multiaddr)) - - return pInfo - } - - // TODO: Temporary adapter for modules using PeerStore - // This should be removed under a breaking change - /** - * Get the info of the given id. - * @param {peerId} peerId - * @returns {PeerInfo} - */ - get (peerId) { - const peer = this.find(peerId) - - const pInfo = new PeerInfo(peerId) - peer.protocols.forEach((p) => pInfo.protocols.add(p)) - peer.multiaddrInfos.forEach((mi) => pInfo.multiaddrs.add(mi.multiaddr)) - - return pInfo - } - - // TODO: Temporary adapter for modules using PeerStore - // This should be removed under a breaking change - /** - * Has the info to the given id. - * @param {PeerId} peerId - * @returns {boolean} - */ - has (peerId) { - return Boolean(this.find(peerId)) - } - - // TODO: Temporary adapter for modules using PeerStore - // This should be removed under a breaking change - /** - * Removes the peer provided. - * @param {PeerId} peerId - * @returns {boolean} true if found and removed - */ - remove (peerId) { - return this.delete(peerId) - } - // TODO: Temporary adapter for modules using PeerStore - // This should be removed under a breaking change - /** - * Completely replaces the existing peers metadata with the given `peerInfo` - * @param {PeerInfo} peerInfo - * @returns {void} - */ - replace (peerInfo) { - this.put(peerInfo) - } - - // TODO: Temporary adapter for modules using PeerStore - // This should be removed under a breaking change - /** - * Returns the known multiaddrs for a given `PeerInfo`. All returned multiaddrs - * will include the encapsulated `PeerId` of the peer. - * @param {PeerInfo} peerInfo - * @returns {Array} - */ - multiaddrsForPeer (peerInfo) { - return this.addressBook.getMultiaddrsForPeer(peerInfo.id) + /** + * TODO: this should only exist until we have the key-book + * Map known peers to their peer-id. 
+ * @type {Map} + */ + this.peerIds = new Map() } /** @@ -195,15 +111,16 @@ class PeerStore extends EventEmitter { } /** - * Find the stored information of a given peer. + * Get the stored information of a given peer. * @param {PeerId} peerId * @returns {peerInfo} */ - find (peerId) { + get (peerId) { if (!PeerId.isPeerId(peerId)) { throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS) } + const id = this.peerIds.get(peerId.toB58String()) const multiaddrInfos = this.addressBook.get(peerId) const protocols = this.protoBook.get(peerId) @@ -212,6 +129,7 @@ class PeerStore extends EventEmitter { } return { + id: id || peerId, multiaddrInfos: multiaddrInfos || [], protocols: protocols || [] } diff --git a/src/peer-store/proto-book.js b/src/peer-store/proto-book.js index a5c5867ee8..7f73812524 100644 --- a/src/peer-store/proto-book.js +++ b/src/peer-store/proto-book.js @@ -22,7 +22,7 @@ const { class ProtoBook extends Book { /** * @constructor - * @param {EventEmitter} peerStore + * @param {PeerStore} peerStore */ constructor (peerStore) { /** @@ -71,6 +71,7 @@ class ProtoBook extends Book { } this.data.set(id, newSet) + this._setPeerId(peerId) log(`stored provided protocols for ${id}`) // TODO: Remove peerInfo and its usage on peer-info deprecate @@ -118,6 +119,7 @@ class ProtoBook extends Book { protocols = [...newSet] this.data.set(id, newSet) + this._setPeerId(peerId) log(`added provided protocols for ${id}`) // TODO: Remove peerInfo and its usage on peer-info deprecate diff --git a/test/content-routing/dht/operation.node.js b/test/content-routing/dht/operation.node.js index 538d8b326f..53643c3c3a 100644 --- a/test/content-routing/dht/operation.node.js +++ b/test/content-routing/dht/operation.node.js @@ -74,8 +74,8 @@ describe('DHT subsystem operates correctly', () => { ]) await libp2p.contentRouting.put(key, value) - const fetchedValue = await remoteLibp2p.contentRouting.get(key) + expect(fetchedValue).to.eql(value) }) }) diff --git a/test/peer-store/address-book.spec.js b/test/peer-store/address-book.spec.js index 1ec11eb742..fba8d72c64 100644 --- a/test/peer-store/address-book.spec.js +++ b/test/peer-store/address-book.spec.js @@ -5,11 +5,10 @@ const chai = require('chai') chai.use(require('dirty-chai')) const { expect } = chai -const { EventEmitter } = require('events') const pDefer = require('p-defer') const multiaddr = require('multiaddr') -const AddressBook = require('../../src/peer-store/address-book') +const PeerStore = require('../../src/peer-store') const peerUtils = require('../utils/creators/peer') const { @@ -30,15 +29,15 @@ describe('addressBook', () => { }) describe('addressBook.set', () => { - let ee, ab + let peerStore, ab beforeEach(() => { - ee = new EventEmitter() - ab = new AddressBook(ee) + peerStore = new PeerStore() + ab = peerStore.addressBook }) afterEach(() => { - ee.removeAllListeners() + peerStore.removeAllListeners() }) it('throwns invalid parameters error if invalid PeerId is provided', () => { @@ -63,7 +62,7 @@ describe('addressBook', () => { const defer = pDefer() const supportedMultiaddrs = [addr1, addr2] - ee.once('change:multiaddrs', ({ peerId, multiaddrs }) => { + peerStore.once('change:multiaddrs', ({ peerId, multiaddrs }) => { expect(peerId).to.exist() expect(multiaddrs).to.eql(supportedMultiaddrs) defer.resolve() @@ -84,7 +83,7 @@ describe('addressBook', () => { const supportedMultiaddrsB = [addr2] let changeCounter = 0 - ee.on('change:multiaddrs', () => { + peerStore.on('change:multiaddrs', () => { changeCounter++ 
if (changeCounter > 1) { defer.resolve() @@ -109,7 +108,7 @@ describe('addressBook', () => { const supportedMultiaddrs = [addr1, addr2] let changeCounter = 0 - ee.on('change:multiaddrs', () => { + peerStore.on('change:multiaddrs', () => { changeCounter++ if (changeCounter > 1) { defer.reject() @@ -132,15 +131,15 @@ describe('addressBook', () => { }) describe('addressBook.add', () => { - let ee, ab + let peerStore, ab beforeEach(() => { - ee = new EventEmitter() - ab = new AddressBook(ee) + peerStore = new PeerStore() + ab = peerStore.addressBook }) afterEach(() => { - ee.removeAllListeners() + peerStore.removeAllListeners() }) it('throwns invalid parameters error if invalid PeerId is provided', () => { @@ -169,7 +168,7 @@ describe('addressBook', () => { const finalMultiaddrs = supportedMultiaddrsA.concat(supportedMultiaddrsB) let changeTrigger = 2 - ee.on('change:multiaddrs', ({ multiaddrs }) => { + peerStore.on('change:multiaddrs', ({ multiaddrs }) => { changeTrigger-- if (changeTrigger === 0 && arraysAreEqual(multiaddrs, finalMultiaddrs)) { defer.resolve() @@ -199,7 +198,7 @@ describe('addressBook', () => { const finalMultiaddrs = supportedMultiaddrsA.concat(supportedMultiaddrsB) let changeCounter = 0 - ee.on('change:multiaddrs', () => { + peerStore.on('change:multiaddrs', () => { changeCounter++ if (changeCounter > 1) { defer.resolve() @@ -225,7 +224,7 @@ describe('addressBook', () => { const supportedMultiaddrsB = [addr2] let changeCounter = 0 - ee.on('change:multiaddrs', () => { + peerStore.on('change:multiaddrs', () => { changeCounter++ if (changeCounter > 1) { defer.reject() @@ -248,11 +247,11 @@ describe('addressBook', () => { }) describe('addressBook.get', () => { - let ee, ab + let peerStore, ab beforeEach(() => { - ee = new EventEmitter() - ab = new AddressBook(ee) + peerStore = new PeerStore() + ab = peerStore.addressBook }) it('throwns invalid parameters error if invalid PeerId is provided', () => { @@ -279,11 +278,11 @@ describe('addressBook', () => { }) describe('addressBook.getMultiaddrsForPeer', () => { - let ee, ab + let peerStore, ab beforeEach(() => { - ee = new EventEmitter() - ab = new AddressBook(ee) + peerStore = new PeerStore() + ab = peerStore.addressBook }) it('throwns invalid parameters error if invalid PeerId is provided', () => { @@ -311,11 +310,11 @@ describe('addressBook', () => { }) describe('addressBook.delete', () => { - let ee, ab + let peerStore, ab beforeEach(() => { - ee = new EventEmitter() - ab = new AddressBook(ee) + peerStore = new PeerStore() + ab = peerStore.addressBook }) it('throwns invalid parameters error if invalid PeerId is provided', () => { @@ -327,7 +326,7 @@ describe('addressBook', () => { it('returns false if no records exist for the peer and no event is emitted', () => { const defer = pDefer() - ee.on('change:multiaddrs', () => { + peerStore.on('change:multiaddrs', () => { defer.reject() }) @@ -350,7 +349,7 @@ describe('addressBook', () => { ab.set(peerId, supportedMultiaddrs) // Listen after set - ee.on('change:multiaddrs', ({ multiaddrs }) => { + peerStore.on('change:multiaddrs', ({ multiaddrs }) => { expect(multiaddrs.length).to.eql(0) defer.resolve() }) diff --git a/test/peer-store/peer-store.spec.js b/test/peer-store/peer-store.spec.js index 411977f86d..19b74af7ff 100644 --- a/test/peer-store/peer-store.spec.js +++ b/test/peer-store/peer-store.spec.js @@ -45,7 +45,7 @@ describe('peer-store', () => { }) it('returns undefined on trying to find a non existant peerId', () => { - const peerInfo = peerStore.find(peerIds[0]) + const 
peerInfo = peerStore.get(peerIds[0]) expect(peerInfo).to.not.exist() }) }) @@ -101,8 +101,8 @@ describe('peer-store', () => { expect(peers.size).to.equal(3) }) - it('finds the stored information of a peer in all its books', () => { - const peerInfo = peerStore.find(peerIds[0]) + it('gets the stored information of a peer in all its books', () => { + const peerInfo = peerStore.get(peerIds[0]) expect(peerInfo).to.exist() expect(peerInfo.protocols).to.have.members([proto1]) @@ -110,8 +110,8 @@ describe('peer-store', () => { expect(peerMultiaddrs).to.have.members([addr1, addr2]) }) - it('finds the stored information of a peer that is not present in all its books', () => { - const peerInfo = peerStore.find(peerIds[2]) + it('gets the stored information of a peer that is not present in all its books', () => { + const peerInfo = peerStore.get(peerIds[2]) expect(peerInfo).to.exist() expect(peerInfo.protocols.length).to.eql(0) diff --git a/test/peer-store/proto-book.spec.js b/test/peer-store/proto-book.spec.js index 7985dfe1dc..d153698aa6 100644 --- a/test/peer-store/proto-book.spec.js +++ b/test/peer-store/proto-book.spec.js @@ -5,10 +5,9 @@ const chai = require('chai') chai.use(require('dirty-chai')) const { expect } = chai -const { EventEmitter } = require('events') const pDefer = require('p-defer') -const ProtoBook = require('../../src/peer-store/proto-book') +const PeerStore = require('../../src/peer-store') const peerUtils = require('../utils/creators/peer') const { @@ -25,15 +24,15 @@ describe('protoBook', () => { }) describe('protoBook.set', () => { - let ee, pb + let peerStore, pb beforeEach(() => { - ee = new EventEmitter() - pb = new ProtoBook(ee) + peerStore = new PeerStore() + pb = peerStore.protoBook }) afterEach(() => { - ee.removeAllListeners() + peerStore.removeAllListeners() }) it('throwns invalid parameters error if invalid PeerId is provided', () => { @@ -52,7 +51,7 @@ describe('protoBook', () => { const defer = pDefer() const supportedProtocols = ['protocol1', 'protocol2'] - ee.once('change:protocols', ({ peerId, protocols }) => { + peerStore.once('change:protocols', ({ peerId, protocols }) => { expect(peerId).to.exist() expect(protocols).to.have.deep.members(supportedProtocols) defer.resolve() @@ -72,7 +71,7 @@ describe('protoBook', () => { const supportedProtocolsB = ['protocol2'] let changeCounter = 0 - ee.on('change:protocols', () => { + peerStore.on('change:protocols', () => { changeCounter++ if (changeCounter > 1) { defer.resolve() @@ -96,7 +95,7 @@ describe('protoBook', () => { const supportedProtocols = ['protocol1', 'protocol2'] let changeCounter = 0 - ee.on('change:protocols', () => { + peerStore.on('change:protocols', () => { changeCounter++ if (changeCounter > 1) { defer.reject() @@ -119,15 +118,15 @@ describe('protoBook', () => { }) describe('protoBook.add', () => { - let ee, pb + let peerStore, pb beforeEach(() => { - ee = new EventEmitter() - pb = new ProtoBook(ee) + peerStore = new PeerStore() + pb = peerStore.protoBook }) afterEach(() => { - ee.removeAllListeners() + peerStore.removeAllListeners() }) it('throwns invalid parameters error if invalid PeerId is provided', () => { @@ -150,7 +149,7 @@ describe('protoBook', () => { const finalProtocols = supportedProtocolsA.concat(supportedProtocolsB) let changeTrigger = 2 - ee.on('change:protocols', ({ protocols }) => { + peerStore.on('change:protocols', ({ protocols }) => { changeTrigger-- if (changeTrigger === 0 && arraysAreEqual(protocols, finalProtocols)) { defer.resolve() @@ -178,7 +177,7 @@ describe('protoBook', 
() => { const finalProtocols = supportedProtocolsA.concat(supportedProtocolsB) let changeCounter = 0 - ee.on('change:protocols', () => { + peerStore.on('change:protocols', () => { changeCounter++ if (changeCounter > 1) { defer.resolve() @@ -203,7 +202,7 @@ describe('protoBook', () => { const supportedProtocolsB = ['protocol2'] let changeCounter = 0 - ee.on('change:protocols', () => { + peerStore.on('change:protocols', () => { changeCounter++ if (changeCounter > 1) { defer.reject() @@ -226,11 +225,11 @@ describe('protoBook', () => { }) describe('protoBook.get', () => { - let ee, pb + let peerStore, pb beforeEach(() => { - ee = new EventEmitter() - pb = new ProtoBook(ee) + peerStore = new PeerStore() + pb = peerStore.protoBook }) it('throwns invalid parameters error if invalid PeerId is provided', () => { @@ -256,11 +255,11 @@ describe('protoBook', () => { }) describe('protoBook.delete', () => { - let ee, pb + let peerStore, pb beforeEach(() => { - ee = new EventEmitter() - pb = new ProtoBook(ee) + peerStore = new PeerStore() + pb = peerStore.protoBook }) it('throwns invalid parameters error if invalid PeerId is provided', () => { @@ -272,7 +271,7 @@ describe('protoBook', () => { it('returns false if no records exist for the peer and no event is emitted', () => { const defer = pDefer() - ee.on('change:protocols', () => { + peerStore.on('change:protocols', () => { defer.reject() }) @@ -295,7 +294,7 @@ describe('protoBook', () => { pb.set(peerId, supportedProtocols) // Listen after set - ee.on('change:protocols', ({ protocols }) => { + peerStore.on('change:protocols', ({ protocols }) => { expect(protocols.length).to.eql(0) defer.resolve() }) From 0f6e8781dbf50031bac6a7c6cc87c161bc625fd8 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Tue, 14 Apr 2020 14:05:30 +0200 Subject: [PATCH 090/102] chore: remove peer-info usage BREAKING CHANGE: all API methods with peer-info parameters or return values were changed. 
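A rough before/after sketch of this breaking change, assuming an existing `libp2p` instance and that `peerId` and the `multiaddr` factory are already in scope (the method names follow the diffs below; the echo protocol and address are placeholders):

```js
// Before: PeerInfo-centric calls
// await libp2p.dial(peerInfo)
// await libp2p.ping(peerInfo)
// libp2p.peerStore.put(peerInfo)

// After: a PeerId (a multiaddr or base58 string is also accepted by dial)
await libp2p.dial(peerId)
await libp2p.ping(peerId)

// Addresses and protocols are now stored per book, keyed by PeerId
libp2p.peerStore.addressBook.set(peerId, [multiaddr('/ip4/127.0.0.1/tcp/8000')])
libp2p.peerStore.protoBook.set(peerId, ['/echo/1.0.0'])
```
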
You can check the API.md document, in order to check the new values to use --- .aegir.js | 8 +- doc/API.md | 61 ++++---- package.json | 20 +-- src/circuit/circuit/hop.js | 9 +- src/circuit/index.js | 12 +- src/config.js | 3 + src/connection-manager/index.js | 2 +- src/content-routing.js | 6 +- src/dialer/index.js | 74 +++------- src/errors.js | 3 +- src/get-peer-id.js | 41 ++++++ src/get-peer-info.js | 78 ---------- src/identify/index.js | 19 ++- src/index.js | 138 +++++++++--------- src/peer-routing.js | 2 +- src/peer-store/address-book.js | 17 +-- src/peer-store/book.js | 5 - src/peer-store/index.js | 57 +++----- src/peer-store/proto-book.js | 11 -- src/pubsub.js | 2 +- src/registrar.js | 52 +++---- test/content-routing/content-routing.node.js | 10 +- .../content-routing/dht/configuration.node.js | 24 +-- test/content-routing/dht/operation.node.js | 35 +++-- test/core/listening.node.js | 12 +- test/core/ping.node.js | 9 +- test/dialing/direct.node.js | 82 ++++------- test/dialing/direct.spec.js | 18 +-- test/dialing/relay.node.js | 57 ++++---- test/identify/index.spec.js | 62 ++++---- test/metrics/index.node.js | 9 +- test/peer-discovery/index.node.js | 74 ++++++---- test/peer-discovery/index.spec.js | 35 ++--- test/peer-routing/peer-routing.node.js | 12 +- test/peer-store/peer-store.spec.js | 6 +- test/pubsub/configuration.node.js | 24 +-- test/pubsub/implementations.node.js | 22 +-- test/pubsub/operation.node.js | 43 ++++-- test/registrar/registrar.node.js | 18 +-- test/registrar/registrar.spec.js | 58 +++----- test/registrar/utils.js | 6 +- test/transports/transport-manager.spec.js | 12 +- test/upgrading/upgrader.spec.js | 23 ++- test/utils/creators/peer.js | 37 +++-- test/utils/mockConnection.js | 6 +- 45 files changed, 614 insertions(+), 700 deletions(-) create mode 100644 src/get-peer-id.js delete mode 100644 src/get-peer-info.js diff --git a/.aegir.js b/.aegir.js index f5eb73392c..2b1799a17a 100644 --- a/.aegir.js +++ b/.aegir.js @@ -4,7 +4,6 @@ const Libp2p = require('./src') const { MULTIADDRS_WEBSOCKETS } = require('./test/fixtures/browser') const Peers = require('./test/fixtures/peers') const PeerId = require('peer-id') -const PeerInfo = require('peer-info') const WebSockets = require('libp2p-websockets') const Muxer = require('libp2p-mplex') const Crypto = require('libp2p-secio') @@ -14,11 +13,12 @@ let libp2p const before = async () => { // Use the last peer const peerId = await PeerId.createFromJSON(Peers[Peers.length - 1]) - const peerInfo = new PeerInfo(peerId) - peerInfo.multiaddrs.add(MULTIADDRS_WEBSOCKETS[0]) libp2p = new Libp2p({ - peerInfo, + addresses: { + listen: [MULTIADDRS_WEBSOCKETS[0]] + }, + peerId, modules: { transport: [WebSockets], streamMuxer: [Muxer], diff --git a/doc/API.md b/doc/API.md index b8d4c16c5d..2f70a1b432 100644 --- a/doc/API.md +++ b/doc/API.md @@ -60,12 +60,13 @@ Creates an instance of Libp2p. 
|------|------|-------------| | options | `object` | libp2p options | | options.modules | `Array` | libp2p modules to use | +| [options.addresses] | `{ listen: Array }` | Addresses to use for transport listening and to announce to the network | | [options.config] | `object` | libp2p modules configuration and core configuration | | [options.connectionManager] | `object` | libp2p Connection Manager configuration | | [options.datastore] | `object` | must implement [ipfs/interface-datastore](https://github.com/ipfs/interface-datastore) (in memory datastore will be used if not provided) | | [options.dialer] | `object` | libp2p Dialer configuration | [options.metrics] | `object` | libp2p Metrics configuration -| [options.peerInfo] | [`PeerInfo`][peer-info] | peerInfo instance (it will be created if not provided) | +| [options.peerId] | [`PeerId`][peer-id] | peerId instance (it will be created if not provided) | For Libp2p configurations and modules details read the [Configuration Document](./CONFIGURATION.md). @@ -87,7 +88,7 @@ const options = {} const libp2p = await Libp2p.create(options) ``` -Note: The [`PeerInfo`][peer-info] option is not required and will be generated if it is not provided. +Note: The [`PeerId`][peer-id] option is not required and will be generated if it is not provided.
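To make the new options concrete, here is a minimal sketch of `Libp2p.create` using the `addresses.listen` and `peerId` options introduced by this patch. The module choices (libp2p-tcp, libp2p-mplex, libp2p-secio) and the listen address mirror the ones used in this patch's test and `.aegir.js` changes and are illustrative, not required:

```js
const Libp2p = require('libp2p')
const PeerId = require('peer-id')
const multiaddr = require('multiaddr')
const TCP = require('libp2p-tcp')
const Muxer = require('libp2p-mplex')
const Crypto = require('libp2p-secio')

// Optional: a PeerId is generated when omitted
const peerId = await PeerId.create()

const libp2p = await Libp2p.create({
  peerId,
  addresses: {
    // replaces the old peerInfo.multiaddrs.add(...) calls
    listen: [multiaddr('/ip4/0.0.0.0/tcp/0')]
  },
  modules: {
    transport: [TCP],
    streamMuxer: [Muxer],
    connEncryption: [Crypto]
  }
})

await libp2p.start()
```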
Alternative As an alternative, it is possible to create a Libp2p instance with the constructor: @@ -106,7 +107,7 @@ const libp2p = new Libp2p(options) Required keys in the `options` object: -- `peerInfo`: instance of [`PeerInfo`][peer-info] that contains the [`PeerId`][peer-id], Keys and [multiaddrs][multiaddr] of the libp2p Node (optional when using `.create`). +- `peerId`: instance of [`PeerId`][peer-id] that contains the peer Keys (optional when using `.create`). - `modules.transport`: An array that must include at least 1 compliant transport. See [modules that implement the transport interface](https://github.com/libp2p/js-interfaces/tree/master/src/transport#modules-that-implement-the-interface).
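For the constructor form, a correspondingly minimal sketch with the required keys. Only `peerId` and `modules.transport` are documented as required; the muxer and crypto modules shown are the ones used throughout this patch's tests and are illustrative:

```js
const Libp2p = require('libp2p')
const PeerId = require('peer-id')
const TCP = require('libp2p-tcp')
const Muxer = require('libp2p-mplex')
const Crypto = require('libp2p-secio')

// Unlike `.create`, the constructor does not generate a PeerId for you
const peerId = await PeerId.create()

const libp2p = new Libp2p({
  peerId,
  modules: {
    transport: [TCP],
    streamMuxer: [Muxer],
    connEncryption: [Crypto]
  }
})
```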
@@ -163,6 +164,10 @@ const libp2p = await Libp2p.create(options) await libp2p.stop() ``` +### addresses + +TODO with `address-manager`. + ### connections A Getter that returns a Map of the current Connections libp2p has to other peers. @@ -194,10 +199,12 @@ for (const [peerId, connections] of libp2p.connections) { | Name | Type | Description | |------|------|-------------| -| peer | [`PeerInfo`][peer-info]\|[`PeerId`][peer-id]\|[`Multiaddr`][multiaddr]\|`string` | The peer to dial. If a [`Multiaddr`][multiaddr] or its string is provided, it **must** include the peer id | +| peer | [`PeerId`][peer-id]\|[`Multiaddr`][multiaddr]\|`string` | The peer to dial. | | [options] | `object` | dial options | | [options.signal] | [`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal) | An `AbortSignal` instance obtained from an [`AbortController`](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) that can be used to abort the connection before it completes | +**Note:** If a [`Multiaddr`][multiaddr] or its string is provided, it **must** include the peer id. Moreover, if a [`PeerId`][peer-id] is given, the peer will need to have known multiaddrs for it in the PeerStore. + #### Returns | Type | Description | @@ -208,7 +215,7 @@ for (const [peerId, connections] of libp2p.connections) { ```js // ... -const conn = await libp2p.dial(remotePeerInfo) +const conn = await libp2p.dial(remotePeerId) // create a new stream within the connection const { stream, protocol } = await conn.newStream(['/echo/1.1.0', '/echo/1.0.0']) @@ -229,11 +236,13 @@ Dials to another peer in the network and selects a protocol to communicate with | Name | Type | Description | |------|------|-------------| -| peer | [`PeerInfo`][peer-info]\|[`PeerId`][peer-id]\|[`Multiaddr`][multiaddr]\|`string` | The peer to dial. If a [`Multiaddr`][multiaddr] or its string is provided, it **must** include the peer id | +| peer | [`PeerId`][peer-id]\|[`Multiaddr`][multiaddr]\|`string` | The peer to dial. | | protocols | `string|Array` | A list of protocols (or single protocol) to negotiate with. Protocols are attempted in order until a match is made. (e.g '/ipfs/bitswap/1.1.0') | | [options] | `object` | dial options | | [options.signal] | [`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal) | An `AbortSignal` instance obtained from an [`AbortController`](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) that can be used to abort the connection before it completes | +**Note:** If a [`Multiaddr`][multiaddr] or its string is provided, it **must** include the peer id. Moreover, if a [`PeerId`][peer-id] is given, the peer will need to have known multiaddrs for it in the PeerStore. + #### Returns | Type | Description | @@ -246,7 +255,7 @@ Dials to another peer in the network and selects a protocol to communicate with // ... const pipe = require('it-pipe') -const { stream, protocol } = await libp2p.dialProtocol(remotePeerInfo, protocols) +const { stream, protocol } = await libp2p.dialProtocol(remotePeerId, protocols) // Use this new stream like any other duplex stream pipe([1, 2, 3], stream, consume) @@ -262,7 +271,7 @@ Attempts to gracefully close an open connection to the given peer. 
If the connec | Name | Type | Description | |------|------|-------------| -| peer | [`PeerInfo`][peer-info]\|[`PeerId`][peer-id]\|[`Multiaddr`][multiaddr]\|`string` | peer to hang up | +| peer | [`PeerId`][peer-id]\|[`Multiaddr`][multiaddr]\|`string` | peer to hang up | #### Returns @@ -274,7 +283,7 @@ Attempts to gracefully close an open connection to the given peer. If the connec ```js // ... -await libp2p.hangUp(remotePeerInfo) +await libp2p.hangUp(remotePeerId) ``` ### handle @@ -333,7 +342,7 @@ Pings a given peer and get the operation's latency. | Name | Type | Description | |------|------|-------------| -| peer | [`PeerInfo`][peer-info]\|[`PeerId`][peer-id]\|[`Multiaddr`][multiaddr]\|`string` | peer to ping | +| peer | [`PeerId`][peer-id]\|[`Multiaddr`][multiaddr]\|`string` | peer to ping | #### Returns @@ -366,13 +375,13 @@ Iterates over all peer routers in series to find the given peer. If the DHT is e | Type | Description | |------|-------------| -| `Promise` | Peer info of a known peer | +| `Promise<{ id: PeerId, multiaddrs: Multiaddr[] }>` | Peer data of a known peer | #### Example ```js // ... -const peerInfo = await libp2p.peerRouting.findPeer(peerId, options) +const peerData = await libp2p.peerRouting.findPeer(peerId, options) ``` ### contentRouting.findProviders @@ -395,14 +404,14 @@ Once a content router succeeds, the iteration will stop. If the DHT is enabled, | Type | Description | |------|-------------| -| `AsyncIterator` | Async iterator for [`PeerInfo`][peer-info] | +| `AsyncIterable<{ id: PeerId, multiaddrs: Multiaddr[] }` | Async iterator for peer data | #### Example ```js // Iterate over the providers found for the given cid for await (const provider of libp2p.contentRouting.findProviders(cid)) { - console.log(provider) + console.log(provider.id, provider.addrs) } ``` @@ -809,7 +818,7 @@ peerStore.delete(peerId2) ### peerStore.get -Get the stored information of a given peer. +Get the stored information of a given peer, namely its [`PeerId`][peer-id], known [`MultiaddrInfos`][multiaddr-info] and supported protocols. `peerStore.get(peerId)` @@ -823,9 +832,7 @@ Get the stored information of a given peer. | Type | Description | |------|-------------| -| [`PeerInfo`][peer-info] | Peer information of the provided peer | - -TODO: change when `peer-info` is deprecated to new pointer +| `{ id: PeerId, multiaddrInfos: Array, protocols: Array }` | Peer information of the provided peer | #### Example @@ -836,6 +843,7 @@ peerStore.addressBook.set(peerId, multiaddrs) peerStore.protoBook.set(peerId, protocols) peerStore.get(peerId) // { +// id: {}, // MultiaddrInfos: [...], // protocols: [...] // } @@ -851,15 +859,13 @@ Get all the stored information of every peer. 
| Type | Description | |------|-------------| -| `Map` | Peer information of every peer | - -TODO: change when `peer-info` is deprecated to new pointer (breaking change) +| `Map, protocols: Array }>` | Peer data of every peer known | #### Example ```js -for (let [peerIdString, peerInfo] of peerStore.peers.entries()) { - // peerInfo instance +for (let [peerIdString, peerData] of peerStore.peers.entries()) { + // peerData } ``` @@ -1070,7 +1076,7 @@ Returns the [`Stats`](#stats) object for a given [`PeerId`][peer-id] if it is be #### Example ```js -const peerStats = libp2p.metrics.forPeer(peerInfo) +const peerStats = libp2p.metrics.forPeer(peerId) console.log(peerStats.toJSON()) ``` @@ -1118,7 +1124,7 @@ Once you have a libp2p instance, you can listen to several events it emits, so t If `autoDial` option is `true`, applications should **not** attempt to connect to the peer unless they are performing a specific action. See [peer discovery and auto dial](./PEER_DISCOVERY.md) for more information. -- `peer`: instance of [`PeerInfo`][peer-info] +- `peer`: instance of [`PeerId`][peer-id] #### A new connection to a peer has been opened @@ -1126,7 +1132,7 @@ This event will be triggered anytime a new Connection is established to another `libp2p.on('peer:connect', (peer) => {})` -- `peer`: instance of [`PeerInfo`][peer-info] +- `peer`: instance of [`PeerId`][peer-id] #### An existing connection to a peer has been closed @@ -1134,7 +1140,7 @@ This event will be triggered anytime we are disconnected from another peer, rega `libp2p.on('peer:disconnect', (peer) => {})` -- `peer`: instance of [`PeerInfo`][peer-info] +- `peer`: instance of [`PeerId`][peer-id] ### libp2p.peerStore @@ -1183,4 +1189,3 @@ This event will be triggered anytime we are disconnected from another peer, rega [connection]: https://github.com/libp2p/js-interfaces/tree/master/src/connection [multiaddr]: https://github.com/multiformats/js-multiaddr [peer-id]: https://github.com/libp2p/js-peer-id -[peer-info]: https://github.com/libp2p/js-peer-info diff --git a/package.json b/package.json index 86a71bb0b3..0218decf39 100644 --- a/package.json +++ b/package.json @@ -57,7 +57,7 @@ "it-protocol-buffers": "^0.2.0", "latency-monitor": "~0.2.1", "libp2p-crypto": "^0.17.1", - "libp2p-interfaces": "^0.2.3", + "libp2p-interfaces": "^0.3.0", "libp2p-utils": "^0.1.2", "mafmt": "^7.0.0", "merge-options": "^2.0.0", @@ -69,7 +69,6 @@ "p-fifo": "^1.0.0", "p-settle": "^4.0.0", "peer-id": "^0.13.4", - "peer-info": "^0.17.0", "protons": "^1.0.1", "retimer": "^2.0.0", "timeout-abort-controller": "^1.0.0", @@ -85,20 +84,21 @@ "delay": "^4.3.0", "dirty-chai": "^2.0.1", "interop-libp2p": "libp2p/interop#chore/update-libp2p-daemon-with-peerstore", + "ipfs-http-client": "^44.0.0", "it-concat": "^1.0.0", "it-pair": "^1.0.0", "it-pushable": "^1.4.0", - "libp2p-bootstrap": "^0.10.3", - "libp2p-delegated-content-routing": "^0.4.1", - "libp2p-delegated-peer-routing": "^0.4.0", - "libp2p-floodsub": "^0.20.0", - "libp2p-gossipsub": "^0.2.0", - "libp2p-kad-dht": "^0.19.0-pre.0", - "libp2p-mdns": "^0.13.0", + "libp2p-bootstrap": "^0.11.0", + "libp2p-delegated-content-routing": "^0.5.0", + "libp2p-delegated-peer-routing": "^0.5.0", + "libp2p-floodsub": "^0.21.0", + "libp2p-gossipsub": "^0.4.0", + "libp2p-kad-dht": "^0.19.0", + "libp2p-mdns": "^0.14.0", "libp2p-mplex": "^0.9.1", "libp2p-secio": "^0.12.1", "libp2p-tcp": "^0.14.1", - "libp2p-webrtc-star": "^0.17.0", + "libp2p-webrtc-star": "^0.18.0", "libp2p-websockets": "^0.13.1", "nock": "^12.0.0", "p-defer": "^3.0.0", 
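Before moving on to the source changes, a short sketch of how the event payloads described in the API document above change in practice: `peer:discovery`, `peer:connect` and `peer:disconnect` handlers now receive a `PeerId`, and any further detail (multiaddrs, protocols) is read from the peer store. This is a hedged illustration, not code taken from the patch; `libp2p` is assumed to be an already-created node:

```js
libp2p.on('peer:discovery', (peerId) => {
  console.log('discovered', peerId.toB58String())
})

libp2p.on('peer:connect', (peerId) => {
  // Known addresses and protocols now live in the peer store,
  // not on the event payload as they did with peer-info
  const peerData = libp2p.peerStore.get(peerId)
  console.log('connected to', peerId.toB58String(), peerData && peerData.protocols)
})

libp2p.on('peer:disconnect', (peerId) => {
  console.log('disconnected from', peerId.toB58String())
})
```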
diff --git a/src/circuit/circuit/hop.js b/src/circuit/circuit/hop.js index 0db27c0d53..0a5e71c8ba 100644 --- a/src/circuit/circuit/hop.js +++ b/src/circuit/circuit/hop.js @@ -1,7 +1,9 @@ 'use strict' const debug = require('debug') -const PeerInfo = require('peer-info') +const log = debug('libp2p:circuit:hop') +log.error = debug('libp2p:circuit:hop:error') + const PeerId = require('peer-id') const { validateAddrs } = require('./utils') const StreamHandler = require('./stream-handler') @@ -14,9 +16,6 @@ const { stop } = require('./stop') const multicodec = require('./../multicodec') -const log = debug('libp2p:circuit:hop') -log.error = debug('libp2p:circuit:hop:error') - module.exports.handleHop = async function handleHop ({ connection, request, @@ -42,7 +41,7 @@ module.exports.handleHop = async function handleHop ({ // Get the connection to the destination (stop) peer const destinationPeer = new PeerId(request.dstPeer.id) - const destinationConnection = circuit._registrar.getConnection(new PeerInfo(destinationPeer)) + const destinationConnection = circuit._registrar.getConnection(destinationPeer) if (!destinationConnection && !circuit._options.hop.active) { log('HOP request received but we are not connected to the destination peer') return streamHandler.end({ diff --git a/src/circuit/index.js b/src/circuit/index.js index a5451d0653..be8326aaa7 100644 --- a/src/circuit/index.js +++ b/src/circuit/index.js @@ -3,7 +3,6 @@ const mafmt = require('mafmt') const multiaddr = require('multiaddr') const PeerId = require('peer-id') -const PeerInfo = require('peer-info') const withIs = require('class-is') const { CircuitRelay: CircuitPB } = require('./protocol') @@ -32,7 +31,8 @@ class Circuit { this._registrar = libp2p.registrar this._upgrader = upgrader this._options = libp2p._config.relay - this.peerInfo = libp2p.peerInfo + this.addresses = libp2p.addresses + this.peerId = libp2p.peerId this._registrar.handle(multicodec, this._onProtocol.bind(this)) } @@ -107,7 +107,7 @@ class Circuit { const destinationPeer = PeerId.createFromCID(destinationAddr.getPeerId()) let disconnectOnFailure = false - let relayConnection = this._registrar.getConnection(new PeerInfo(relayPeer)) + let relayConnection = this._registrar.getConnection(relayPeer) if (!relayConnection) { relayConnection = await this._dialer.connectToPeer(relayAddr, options) disconnectOnFailure = true @@ -120,8 +120,8 @@ class Circuit { request: { type: CircuitPB.Type.HOP, srcPeer: { - id: this.peerInfo.id.toBytes(), - addrs: this.peerInfo.multiaddrs.toArray().map(addr => addr.buffer) + id: this.peerId.toBytes(), + addrs: this.addresses.listen.map(addr => addr.buffer) }, dstPeer: { id: destinationPeer.toBytes(), @@ -130,7 +130,7 @@ class Circuit { } }) - const localAddr = relayAddr.encapsulate(`/p2p-circuit/p2p/${this.peerInfo.id.toB58String()}`) + const localAddr = relayAddr.encapsulate(`/p2p-circuit/p2p/${this.peerId.toB58String()}`) const maConn = toConnection({ stream: virtualConnection, remoteAddr: ma, diff --git a/src/config.js b/src/config.js index 11b0ce4bb9..618c35d9e2 100644 --- a/src/config.js +++ b/src/config.js @@ -4,6 +4,9 @@ const mergeOptions = require('merge-options') const Constants = require('./constants') const DefaultConfig = { + addresses: { + listen: [] + }, connectionManager: { minPeers: 25 }, diff --git a/src/connection-manager/index.js b/src/connection-manager/index.js index 93b3c99c32..0868226f56 100644 --- a/src/connection-manager/index.js +++ b/src/connection-manager/index.js @@ -40,7 +40,7 @@ class ConnectionManager { 
constructor (libp2p, options) { this._libp2p = libp2p this._registrar = libp2p.registrar - this._peerId = libp2p.peerInfo.id.toB58String() + this._peerId = libp2p.peerId.toB58String() this._options = mergeOptions.call({ ignoreUndefined: true }, defaultOptions, options) if (this._options.maxConnections < this._options.minConnections) { throw errcode(new Error('Connection Manager maxConnections must be greater than minConnections'), ERR_INVALID_PARAMETERS) diff --git a/src/content-routing.js b/src/content-routing.js index 87dfc64061..bf63e773e6 100644 --- a/src/content-routing.js +++ b/src/content-routing.js @@ -24,7 +24,7 @@ module.exports = (node) => { * @param {object} [options] * @param {number} [options.timeout] How long the query should run * @param {number} [options.maxNumProviders] - maximum number of providers to find - * @returns {AsyncIterable} + * @returns {AsyncIterable<{ id: PeerId, multiaddrs: Multiaddr[] }>} */ async * findProviders (key, options) { if (!routers.length) { @@ -42,8 +42,8 @@ module.exports = (node) => { }) ) - for (const pInfo of result) { - yield pInfo + for (const peerData of result) { + yield peerData } }, diff --git a/src/dialer/index.js b/src/dialer/index.js index de9a394394..92b04153c3 100644 --- a/src/dialer/index.js +++ b/src/dialer/index.js @@ -4,11 +4,12 @@ const multiaddr = require('multiaddr') const errCode = require('err-code') const TimeoutController = require('timeout-abort-controller') const anySignal = require('any-signal') -const PeerId = require('peer-id') const debug = require('debug') const log = debug('libp2p:dialer') log.error = debug('libp2p:dialer:error') + const { DialRequest } = require('./dial-request') +const getPeerId = require('../get-peer-id') const { codes } = require('../errors') const { @@ -57,18 +58,19 @@ class Dialer { } /** - * Connects to a given `PeerId` or `Multiaddr` by dialing all of its known addresses. + * Connects to a given `peer` by dialing all of its known addresses. * The dial to the first address that is successfully able to upgrade a connection * will be used. * - * @param {PeerId|Multiaddr} peerId The peer to dial + * @param {PeerId|Multiaddr|string} peer The peer to dial * @param {object} [options] * @param {AbortSignal} [options.signal] An AbortController signal * @returns {Promise} */ - async connectToPeer (peerId, options = {}) { - const dialTarget = this._createDialTarget(peerId) - if (dialTarget.addrs.length === 0) { + async connectToPeer (peer, options = {}) { + const dialTarget = this._createDialTarget(peer) + + if (!dialTarget.addrs.length) { throw errCode(new Error('The dial request has no addresses'), codes.ERR_NO_VALID_ADDRESSES) } const pendingDial = this._pendingDials.get(dialTarget.id) || this._createPendingDial(dialTarget, options) @@ -98,24 +100,24 @@ class Dialer { /** * Creates a DialTarget. The DialTarget is used to create and track * the DialRequest to a given peer. + * If a multiaddr is received it should be the first address attempted. 
* @private - * @param {PeerId|Multiaddr} peer A PeerId or Multiaddr + * @param {PeerId|Multiaddr|string} peer A PeerId or Multiaddr * @returns {DialTarget} */ _createDialTarget (peer) { - const dialable = Dialer.getDialable(peer) - if (multiaddr.isMultiaddr(dialable)) { - return { - id: dialable.toString(), - addrs: [dialable] - } - } + const peerId = getPeerId(peer, this.peerStore) + let addrs = this.peerStore.addressBook.getMultiaddrsForPeer(peerId) - dialable.multiaddrs && this.peerStore.addressBook.add(dialable.id, Array.from(dialable.multiaddrs)) - const addrs = this.peerStore.addressBook.getMultiaddrsForPeer(dialable.id) + // If received a multiaddr to dial, it should be the first to use + // But, if we know other multiaddrs for the peer, we should try them too. + if (multiaddr.isMultiaddr(peer)) { + addrs = addrs.filter((addr) => !peer.equals(addr)) + addrs.unshift(peer) + } return { - id: dialable.id.toB58String(), + id: peerId.toB58String(), addrs } } @@ -180,44 +182,6 @@ class Dialer { log('token %d released', token) this.tokens.push(token) } - - /** - * PeerInfo object - * @typedef {Object} peerInfo - * @property {Multiaddr} multiaddr peer multiaddr. - * @property {PeerId} id peer id. - */ - - /** - * Converts the given `peer` into a `PeerInfo` or `Multiaddr`. - * @static - * @param {PeerId|Multiaddr|string} peer - * @returns {peerInfo|Multiaddr} - */ - static getDialable (peer) { - if (typeof peer === 'string') { - peer = multiaddr(peer) - } - - let addrs - if (multiaddr.isMultiaddr(peer)) { - addrs = new Set([peer]) // TODO: after peer-info removal, a Set should not be needed - try { - peer = PeerId.createFromCID(peer.getPeerId()) - } catch (err) { - throw errCode(new Error('The multiaddr did not contain a valid peer id'), codes.ERR_INVALID_PEER) - } - } - - if (PeerId.isPeerId(peer)) { - peer = { - id: peer, - multiaddrs: addrs - } - } - - return peer - } } module.exports = Dialer diff --git a/src/errors.js b/src/errors.js index 32b21ff048..ab8f21369d 100644 --- a/src/errors.js +++ b/src/errors.js @@ -26,5 +26,6 @@ exports.codes = { ERR_TIMEOUT: 'ERR_TIMEOUT', ERR_TRANSPORT_UNAVAILABLE: 'ERR_TRANSPORT_UNAVAILABLE', ERR_TRANSPORT_DIAL_FAILED: 'ERR_TRANSPORT_DIAL_FAILED', - ERR_UNSUPPORTED_PROTOCOL: 'ERR_UNSUPPORTED_PROTOCOL' + ERR_UNSUPPORTED_PROTOCOL: 'ERR_UNSUPPORTED_PROTOCOL', + ERR_INVALID_MULTIADDR: 'ERR_INVALID_MULTIADDR' } diff --git a/src/get-peer-id.js b/src/get-peer-id.js new file mode 100644 index 0000000000..9eb687e8bb --- /dev/null +++ b/src/get-peer-id.js @@ -0,0 +1,41 @@ +'use strict' + +const PeerId = require('peer-id') +const multiaddr = require('multiaddr') +const errCode = require('err-code') + +const { codes } = require('./errors') + +/** + * Converts the given `peer` to a `PeerId` instance. + * If a multiaddr is received, the addressBook is updated. 
+ * @param {PeerId|Multiaddr|string} peer + * @param {PeerStore} peerStore + * @returns {PeerId} + */ +function getPeerId (peer, peerStore) { + if (typeof peer === 'string') { + peer = multiaddr(peer) + } + + let addr + if (multiaddr.isMultiaddr(peer)) { + addr = peer + try { + peer = PeerId.createFromB58String(peer.getPeerId()) + } catch (err) { + throw errCode( + new Error(`${peer} is not a valid peer type`), + codes.ERR_INVALID_MULTIADDR + ) + } + } + + if (addr && peerStore) { + peerStore.addressBook.add(peer, [addr]) + } + + return peer +} + +module.exports = getPeerId diff --git a/src/get-peer-info.js b/src/get-peer-info.js deleted file mode 100644 index 5b0748ea8e..0000000000 --- a/src/get-peer-info.js +++ /dev/null @@ -1,78 +0,0 @@ -'use strict' - -const PeerId = require('peer-id') -const PeerInfo = require('peer-info') -const multiaddr = require('multiaddr') -const errCode = require('err-code') - -/** - * Converts the given `peer` to a `PeerInfo` instance. - * The `PeerStore` will be checked for the resulting peer, and - * the peer will be updated in the `PeerStore`. - * - * @param {PeerInfo|PeerId|Multiaddr|string} peer - * @param {PeerStore} peerStore - * @returns {PeerInfo} - */ -function getPeerInfo (peer, peerStore) { - if (typeof peer === 'string') { - peer = multiaddr(peer) - } - - let addr - if (multiaddr.isMultiaddr(peer)) { - addr = peer - try { - peer = PeerId.createFromB58String(peer.getPeerId()) - } catch (err) { - throw errCode( - new Error(`${peer} is not a valid peer type`), - 'ERR_INVALID_MULTIADDR' - ) - } - } - - if (PeerId.isPeerId(peer)) { - peer = new PeerInfo(peer) - } - - addr && peer.multiaddrs.add(addr) - - if (peerStore) { - peerStore.addressBook.add(peer.id, peer.multiaddrs.toArray()) - peerStore.protoBook.add(peer.id, Array.from(peer.protocols)) - } - - return peer -} - -/** - * If `getPeerInfo` does not return a peer with multiaddrs, - * the `libp2p` PeerRouter will be used to attempt to find the peer. 
- * - * @async - * @param {PeerInfo|PeerId|Multiaddr|string} peer - * @param {Libp2p} libp2p - * @returns {Promise} - */ -function getPeerInfoRemote (peer, libp2p) { - let peerInfo - - try { - peerInfo = getPeerInfo(peer, libp2p.peerStore) - } catch (err) { - throw errCode(err, 'ERR_INVALID_PEER_TYPE') - } - - // If we don't have an address for the peer, attempt to find it - if (peerInfo.multiaddrs.size < 1) { - return libp2p.peerRouting.findPeer(peerInfo.id) - } - - return peerInfo -} - -module.exports = { - getPeerInfoRemote, - getPeerInfo -} diff --git a/src/identify/index.js b/src/identify/index.js index 3fe06f4f5c..f64e19f888 100644 --- a/src/identify/index.js +++ b/src/identify/index.js @@ -47,7 +47,8 @@ class IdentifyService { * @param {object} options * @param {Registrar} options.registrar * @param {Map} options.protocols A reference to the protocols we support - * @param {PeerInfo} options.peerInfo The peer running the identify service + * @param {PeerId} options.peerId The peer running the identify service + * @param {{ listen: Array}} options.addresses The peer aaddresses */ constructor (options) { /** @@ -55,9 +56,11 @@ class IdentifyService { */ this.registrar = options.registrar /** - * @property {PeerInfo} + * @property {PeerId} */ - this.peerInfo = options.peerInfo + this.peerId = options.peerId + + this.addresses = options.addresses || {} this._protocols = options.protocols @@ -76,7 +79,7 @@ class IdentifyService { await pipe( [{ - listenAddrs: this.peerInfo.multiaddrs.toArray().map((ma) => ma.buffer), + listenAddrs: this.addresses.listen.map((ma) => ma.buffer), protocols: Array.from(this._protocols.keys()) }], pb.encode(Message), @@ -100,7 +103,7 @@ class IdentifyService { const connections = [] let connection for (const peer of peerStore.peers.values()) { - if (peer.protocols.has(MULTICODEC_IDENTIFY_PUSH) && (connection = this.registrar.getConnection(peer))) { + if (peer.protocols.includes(MULTICODEC_IDENTIFY_PUSH) && (connection = this.registrar.getConnection(peer.id))) { connections.push(connection) } } @@ -193,15 +196,15 @@ class IdentifyService { */ _handleIdentify ({ connection, stream }) { let publicKey = Buffer.alloc(0) - if (this.peerInfo.id.pubKey) { - publicKey = this.peerInfo.id.pubKey.bytes + if (this.peerId.pubKey) { + publicKey = this.peerId.pubKey.bytes } const message = Message.encode({ protocolVersion: PROTOCOL_VERSION, agentVersion: AGENT_VERSION, publicKey, - listenAddrs: this.peerInfo.multiaddrs.toArray().map((ma) => ma.buffer), + listenAddrs: this.addresses.listen.map((ma) => ma.buffer), observedAddr: connection.remoteAddr.buffer, protocols: Array.from(this._protocols.keys()) }) diff --git a/src/index.js b/src/index.js index 1c4c587999..b90382ed08 100644 --- a/src/index.js +++ b/src/index.js @@ -5,12 +5,12 @@ const debug = require('debug') const log = debug('libp2p') log.error = debug('libp2p:error') -const PeerInfo = require('peer-info') +const PeerId = require('peer-id') const peerRouting = require('./peer-routing') const contentRouting = require('./content-routing') const pubsub = require('./pubsub') -const { getPeerInfo } = require('./get-peer-info') +const getPeerId = require('./get-peer-id') const { validate: validateConfig } = require('./config') const { codes } = require('./errors') @@ -42,9 +42,12 @@ class Libp2p extends EventEmitter { this._options = validateConfig(_options) this.datastore = this._options.datastore - this.peerInfo = this._options.peerInfo + this.peerId = this._options.peerId this.peerStore = new PeerStore() + // Addresses 
{listen, announce, noAnnounce} + this.addresses = this._options.addresses + this._modules = this._options.modules this._config = this._options.config this._transport = [] // Transport instances/references @@ -56,29 +59,31 @@ class Libp2p extends EventEmitter { // Setup the Upgrader this.upgrader = new Upgrader({ - localPeer: this.peerInfo.id, + localPeer: this.peerId, metrics: this.metrics, onConnection: (connection) => { - const peerInfo = new PeerInfo(connection.remotePeer) - this.registrar.onConnect(peerInfo, connection) + const peerId = connection.remotePeer + + this.registrar.onConnect(peerId, connection) this.connectionManager.onConnect(connection) - this.emit('peer:connect', peerInfo) + this.emit('peer:connect', peerId) // Run identify for every connection if (this.identifyService) { - this.identifyService.identify(connection, connection.remotePeer) + this.identifyService.identify(connection, peerId) .catch(log.error) } }, onConnectionEnd: (connection) => { - const peerInfo = Dialer.getDialable(connection.remotePeer) - this.registrar.onDisconnect(peerInfo, connection) + const peerId = connection.remotePeer + + this.registrar.onDisconnect(peerId, connection) this.connectionManager.onDisconnect(connection) // If there are no connections to the peer, disconnect - if (!this.registrar.getConnection(peerInfo)) { - this.emit('peer:disconnect', peerInfo) - this.metrics && this.metrics.onPeerDisconnected(peerInfo.id) + if (!this.registrar.getConnection(peerId)) { + this.emit('peer:disconnect', peerId) + this.metrics && this.metrics.onPeerDisconnected(peerId) } } }) @@ -133,7 +138,8 @@ class Libp2p extends EventEmitter { // Add the identify service since we can multiplex this.identifyService = new IdentifyService({ registrar: this.registrar, - peerInfo: this.peerInfo, + peerId: this.peerId, + addresses: this.addresses, protocols: this.upgrader.protocols }) this.handle(Object.values(IDENTIFY_PROTOCOLS), this.identifyService.handleMessage) @@ -151,7 +157,7 @@ class Libp2p extends EventEmitter { const DHT = this._modules.dht this._dht = new DHT({ dialer: this.dialer, - peerInfo: this.peerInfo, + peerId: this.peerId, peerStore: this.peerStore, registrar: this.registrar, datastore: this.datastore, @@ -261,10 +267,9 @@ class Libp2p extends EventEmitter { } /** - * Dials to the provided peer. If successful, the `PeerInfo` of the + * Dials to the provided peer. If successful, the known `PeerData` of the * peer will be added to the nodes `peerStore` - * - * @param {PeerInfo|PeerId|Multiaddr|string} peer The peer to dial + * @param {PeerId|Multiaddr|string} peer The peer to dial * @param {object} options * @param {AbortSignal} [options.signal] * @returns {Promise} @@ -275,30 +280,21 @@ class Libp2p extends EventEmitter { /** * Dials to the provided peer and handshakes with the given protocol. 
- * If successful, the `PeerInfo` of the peer will be added to the nodes `peerStore`, - * and the `Connection` will be sent in the callback - * + * If successful, the known `PeerData` of the peer will be added to the nodes `peerStore`, + * and the `Connection` will be returned * @async - * @param {PeerInfo|PeerId|Multiaddr|string} peer The peer to dial + * @param {PeerId|Multiaddr|string} peer The peer to dial * @param {string[]|string} protocols * @param {object} options * @param {AbortSignal} [options.signal] * @returns {Promise} */ async dialProtocol (peer, protocols, options) { - const dialable = Dialer.getDialable(peer) - let connection - if (PeerInfo.isPeerInfo(dialable)) { - // TODO Inconsistency from: getDialable adds a set, while regular peerInfo uses a Multiaddr set - // This should be handled on `peer-info` removal - const multiaddrs = dialable.multiaddrs.toArray ? dialable.multiaddrs.toArray() : Array.from(dialable.multiaddrs) - this.peerStore.addressBook.add(dialable.id, multiaddrs) - - connection = this.registrar.getConnection(dialable) - } + const peerId = getPeerId(peer, this.peerStore) + let connection = this.registrar.getConnection(peerId) if (!connection) { - connection = await this.dialer.connectToPeer(dialable, options) + connection = await this.dialer.connectToPeer(peer, options) } // If a protocol was provided, create a new stream @@ -311,28 +307,28 @@ class Libp2p extends EventEmitter { /** * Disconnects all connections to the given `peer` - * - * @param {PeerInfo|PeerId|multiaddr|string} peer the peer to close connections to + * @param {PeerId|multiaddr|string} peer the peer to close connections to * @returns {Promise} */ hangUp (peer) { - const peerInfo = getPeerInfo(peer, this.peerStore) + const peerId = getPeerId(peer) + return Promise.all( - this.registrar.connections.get(peerInfo.id.toB58String()).map(connection => { + this.registrar.connections.get(peerId.toB58String()).map(connection => { return connection.close() }) ) } /** - * Pings the given peer - * @param {PeerInfo|PeerId|Multiaddr|string} peer The peer to ping + * Pings the given peer in order to obtain the operation latency. 
+ * @param {PeerId|Multiaddr|string} peer The peer to ping * @returns {Promise} */ - async ping (peer) { - const peerInfo = await getPeerInfo(peer, this.peerStore) + ping (peer) { + const peerId = getPeerId(peer) - return ping(this, peerInfo.id) + return ping(this, peerId) } /** @@ -370,17 +366,14 @@ class Libp2p extends EventEmitter { } async _onStarting () { - // Listen on the addresses supplied in the peerInfo - const multiaddrs = this.peerInfo.multiaddrs.toArray() + // Listen on the addresses provided + const multiaddrs = this.addresses.listen await this.transportManager.listen(multiaddrs) // The addresses may change once the listener starts // eg /ip4/0.0.0.0/tcp/0 => /ip4/192.168.1.0/tcp/58751 - this.peerInfo.multiaddrs.clear() - for (const ma of this.transportManager.getAddrs()) { - this.peerInfo.multiaddrs.add(ma) - } + this.addresses.listen = this.transportManager.getAddrs() if (this._config.pubsub.enabled) { this.pubsub && this.pubsub.start() @@ -408,18 +401,18 @@ class Libp2p extends EventEmitter { this.connectionManager.start() - this.peerStore.on('peer', peerInfo => { - this.emit('peer:discovery', peerInfo) - this._maybeConnect(peerInfo) + this.peerStore.on('peer', peerId => { + this.emit('peer:discovery', peerId) + this._maybeConnect(peerId) }) // Peer discovery await this._setupPeerDiscovery() // Once we start, emit and dial any peers we may have already discovered - for (const peerInfo of this.peerStore.peers.values()) { - this.emit('peer:discovery', peerInfo) - this._maybeConnect(peerInfo) + for (const peerData of this.peerStore.peers.values()) { + this.emit('peer:discovery', peerData.id) + this._maybeConnect(peerData.id) } } @@ -427,34 +420,33 @@ class Libp2p extends EventEmitter { * Called whenever peer discovery services emit `peer` events. * Known peers may be emitted. * @private - * @param {PeerInfo} peerInfo + * @param {PeerDara} peerData */ - _onDiscoveryPeer (peerInfo) { - if (peerInfo.id.toB58String() === this.peerInfo.id.toB58String()) { + _onDiscoveryPeer (peerData) { + if (peerData.id.toB58String() === this.peerId.toB58String()) { log.error(new Error(codes.ERR_DISCOVERED_SELF)) return } - // TODO: once we deprecate peer-info, we should only set if we have data - this.peerStore.addressBook.add(peerInfo.id, peerInfo.multiaddrs.toArray()) - this.peerStore.protoBook.set(peerInfo.id, Array.from(peerInfo.protocols)) + peerData.multiaddrs && this.peerStore.addressBook.add(peerData.id, peerData.multiaddrs) + peerData.protocols && this.peerStore.protoBook.set(peerData.id, peerData.protocols) } /** - * Will dial to the given `peerInfo` if the current number of + * Will dial to the given `peerId` if the current number of * connected peers is less than the configured `ConnectionManager` * minPeers. 
* @private - * @param {PeerInfo} peerInfo + * @param {PeerId} peerId */ - async _maybeConnect (peerInfo) { + async _maybeConnect (peerId) { // If auto dialing is on and we have no connection to the peer, check if we should dial - if (this._config.peerDiscovery.autoDial === true && !this.registrar.getConnection(peerInfo)) { + if (this._config.peerDiscovery.autoDial === true && !this.registrar.getConnection(peerId)) { const minPeers = this._options.connectionManager.minPeers || 0 if (minPeers > this.connectionManager._connections.size) { - log('connecting to discovered peer %s', peerInfo.id.toB58String()) + log('connecting to discovered peer %s', peerId.toB58String()) try { - await this.dialer.connectToPeer(peerInfo) + await this.dialer.connectToPeer(peerId) } catch (err) { log.error('could not connect to discovered peer', err) } @@ -485,7 +477,11 @@ class Libp2p extends EventEmitter { let discoveryService if (typeof DiscoveryService === 'function') { - discoveryService = new DiscoveryService(Object.assign({}, config, { peerInfo: this.peerInfo, libp2p: this })) + discoveryService = new DiscoveryService(Object.assign({}, config, { + peerId: this.peerId, + multiaddrs: this.addresses.listen, + libp2p: this + })) } else { discoveryService = DiscoveryService } @@ -512,19 +508,19 @@ class Libp2p extends EventEmitter { } /** - * Like `new Libp2p(options)` except it will create a `PeerInfo` + * Like `new Libp2p(options)` except it will create a `PeerId` * instance if one is not provided in options. * @param {object} options Libp2p configuration options * @returns {Libp2p} */ Libp2p.create = async function create (options = {}) { - if (options.peerInfo) { + if (options.peerId) { return new Libp2p(options) } - const peerInfo = await PeerInfo.create() + const peerId = await PeerId.create() - options.peerInfo = peerInfo + options.peerId = peerId return new Libp2p(options) } diff --git a/src/peer-routing.js b/src/peer-routing.js index a3eac01d7e..a38eb53b17 100644 --- a/src/peer-routing.js +++ b/src/peer-routing.js @@ -18,7 +18,7 @@ module.exports = (node) => { * @param {String} id The id of the peer to find * @param {object} [options] * @param {number} [options.timeout] How long the query should run - * @returns {Promise} + * @returns {Promise<{ id: PeerId, multiaddrs: Multiaddr[] }>} */ findPeer: async (id, options) => { // eslint-disable-line require-await if (!routers.length) { diff --git a/src/peer-store/address-book.js b/src/peer-store/address-book.js index 6ae4623041..271c4cfea2 100644 --- a/src/peer-store/address-book.js +++ b/src/peer-store/address-book.js @@ -7,7 +7,6 @@ log.error = debug('libp2p:peer-store:address-book:error') const multiaddr = require('multiaddr') const PeerId = require('peer-id') -const PeerInfo = require('peer-info') const Book = require('./book') @@ -83,19 +82,13 @@ class AddressBook extends Book { this._setPeerId(peerId) log(`stored provided multiaddrs for ${id}`) - // TODO: Remove peerInfo and its usage on peer-info deprecate - const peerInfo = new PeerInfo(peerId) - multiaddrInfos.forEach((mi) => peerInfo.multiaddrs.add(mi.multiaddr)) - // Notify the existance of a new peer if (!rec) { - // this._ps.emit('peer', peerId) - this._ps.emit('peer', peerInfo) + this._ps.emit('peer', peerId) } this._ps.emit('change:multiaddrs', { peerId, - peerInfo, multiaddrs: multiaddrInfos.map((mi) => mi.multiaddr) }) @@ -139,20 +132,14 @@ class AddressBook extends Book { log(`added provided multiaddrs for ${id}`) - // TODO: Remove peerInfo and its usage on peer-info deprecate - const 
peerInfo = new PeerInfo(peerId) - multiaddrInfos.forEach((mi) => peerInfo.multiaddrs.add(mi.multiaddr)) - this._ps.emit('change:multiaddrs', { peerId, - peerInfo, multiaddrs: multiaddrInfos.map((mi) => mi.multiaddr) }) // Notify the existance of a new peer if (!rec) { - // this._ps.emit('peer', peerId) - this._ps.emit('peer', peerInfo) + this._ps.emit('peer', peerId) } return this diff --git a/src/peer-store/book.js b/src/peer-store/book.js index c44b55d5c2..ba2ff13f71 100644 --- a/src/peer-store/book.js +++ b/src/peer-store/book.js @@ -2,7 +2,6 @@ const errcode = require('err-code') const PeerId = require('peer-id') -const PeerInfo = require('peer-info') const { ERR_INVALID_PARAMETERS @@ -71,12 +70,8 @@ class Book { return false } - // TODO: Remove peerInfo and its usage on peer-info deprecate - const peerInfo = new PeerInfo(peerId) - this._ps.emit(this.eventName, { peerId, - peerInfo, [this.eventProperty]: [] }) diff --git a/src/peer-store/index.js b/src/peer-store/index.js index 589a186dca..2f3b25ea26 100644 --- a/src/peer-store/index.js +++ b/src/peer-store/index.js @@ -6,9 +6,7 @@ const log = debug('libp2p:peer-store') log.error = debug('libp2p:peer-store:error') const { EventEmitter } = require('events') - const PeerId = require('peer-id') -const PeerInfo = require('peer-info') const AddressBook = require('./address-book') const ProtoBook = require('./proto-book') @@ -25,8 +23,9 @@ const { */ class PeerStore extends EventEmitter { /** - * PeerInfo object - * @typedef {Object} peerInfo + * PeerData object + * @typedef {Object} PeerData + * @property {PeerId} id peer's peer-id instance. * @property {Array} multiaddrsInfos peer's information of the multiaddrs. * @property {Array} protocols peer's supported protocols. */ @@ -54,49 +53,35 @@ class PeerStore extends EventEmitter { /** * Get all the stored information of every peer. - * @returns {Map} + * @returns {Map} */ get peers () { - const peerInfos = new Map() + const peersData = new Map() // AddressBook for (const [idStr, multiaddrInfos] of this.addressBook.data.entries()) { - // TODO: Remove peerInfo and its usage on peer-info deprecate - const peerInfo = new PeerInfo(PeerId.createFromCID(idStr)) - - multiaddrInfos.forEach((mi) => peerInfo.multiaddrs.add((mi.multiaddr))) - - const protocols = this.protoBook.data.get(idStr) || [] - protocols.forEach((p) => peerInfo.protocols.add(p)) - - peerInfos.set(idStr, peerInfo) - // TODO - // peerInfos.set(idStr, { - // id: PeerId.createFromCID(idStr), - // multiaddrInfos, - // protocols: this.protoBook.data.get(idStr) || [] - // }) + const id = PeerId.createFromCID(idStr) + peersData.set(idStr, { + id, + multiaddrInfos, + protocols: this.protoBook.get(id) || [] + }) } // ProtoBook for (const [idStr, protocols] of this.protoBook.data.entries()) { - // TODO: Remove peerInfo and its usage on peer-info deprecate - const peerInfo = peerInfos.get(idStr) - - if (!peerInfo) { - const peerInfo = new PeerInfo(PeerId.createFromCID(idStr)) - - protocols.forEach((p) => peerInfo.protocols.add(p)) - peerInfos.set(idStr, peerInfo) - // peerInfos.set(idStr, { - // id: PeerId.createFromCID(idStr), - // multiaddrInfos: [], - // protocols: protocols - // }) + const pData = peersData.get(idStr) + + if (!pData) { + peersData.set(idStr, { + id: PeerId.createFromCID(idStr), + multiaddrInfos: [], + protocols: Array.from(protocols) + }) } } - return peerInfos + return peersData } /** @@ -113,7 +98,7 @@ class PeerStore extends EventEmitter { /** * Get the stored information of a given peer. 
* @param {PeerId} peerId - * @returns {peerInfo} + * @returns {PeerData} */ get (peerId) { if (!PeerId.isPeerId(peerId)) { diff --git a/src/peer-store/proto-book.js b/src/peer-store/proto-book.js index 7f73812524..ea395dfdf6 100644 --- a/src/peer-store/proto-book.js +++ b/src/peer-store/proto-book.js @@ -6,7 +6,6 @@ const log = debug('libp2p:peer-store:proto-book') log.error = debug('libp2p:peer-store:proto-book:error') const PeerId = require('peer-id') -const PeerInfo = require('peer-info') const Book = require('./book') @@ -74,13 +73,8 @@ class ProtoBook extends Book { this._setPeerId(peerId) log(`stored provided protocols for ${id}`) - // TODO: Remove peerInfo and its usage on peer-info deprecate - const peerInfo = new PeerInfo(peerId) - protocols.forEach((p) => peerInfo.protocols.add(p)) - this._ps.emit('change:protocols', { peerId, - peerInfo, protocols }) @@ -122,13 +116,8 @@ class ProtoBook extends Book { this._setPeerId(peerId) log(`added provided protocols for ${id}`) - // TODO: Remove peerInfo and its usage on peer-info deprecate - const peerInfo = new PeerInfo(peerId) - protocols.forEach((p) => peerInfo.protocols.add(p)) - this._ps.emit('change:protocols', { peerId, - peerInfo, protocols }) diff --git a/src/pubsub.js b/src/pubsub.js index 3f62358672..a3ee7282bf 100644 --- a/src/pubsub.js +++ b/src/pubsub.js @@ -4,7 +4,7 @@ const errCode = require('err-code') const { messages, codes } = require('./errors') module.exports = (node, Pubsub, config) => { - const pubsub = new Pubsub(node.peerInfo, node.registrar, config) + const pubsub = new Pubsub(node.peerId, node.registrar, config) return { /** diff --git a/src/registrar.js b/src/registrar.js index 2aa6bcf85e..fbe7acb532 100644 --- a/src/registrar.js +++ b/src/registrar.js @@ -5,6 +5,8 @@ const errcode = require('err-code') const log = debug('libp2p:peer-store') log.error = debug('libp2p:peer-store:error') +const PeerId = require('peer-id') + const { ERR_INVALID_PARAMETERS } = require('./errors') @@ -69,22 +71,20 @@ class Registrar { /** * Add a new connected peer to the record * TODO: this should live in the ConnectionManager - * @param {PeerInfo} peerInfo + * @param {PeerId} peerId * @param {Connection} conn * @returns {void} */ - onConnect (peerInfo, conn) { - // TODO: This is not a `peer-info` instance anymore, but an object with the data. - // This can be modified to `peer-id` though, once `peer-info` is deprecated. - // if (!PeerInfo.isPeerInfo(peerInfo)) { - // throw errcode(new Error('peerInfo must be an instance of peer-info'), ERR_INVALID_PARAMETERS) - // } + onConnect (peerId, conn) { + if (!PeerId.isPeerId(peerId)) { + throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS) + } if (!Connection.isConnection(conn)) { throw errcode(new Error('conn must be an instance of interface-connection'), ERR_INVALID_PARAMETERS) } - const id = peerInfo.id.toB58String() + const id = peerId.toB58String() const storedConn = this.connections.get(id) if (storedConn) { @@ -97,19 +97,17 @@ class Registrar { /** * Remove a disconnected peer from the record * TODO: this should live in the ConnectionManager - * @param {PeerInfo} peerInfo + * @param {PeerId} peerId * @param {Connection} connection * @param {Error} [error] * @returns {void} */ - onDisconnect (peerInfo, connection, error) { - // TODO: This is not a `peer-info` instance anymore, but an object with the data. - // This can be modified to `peer-id` though, once `peer-info` is deprecated. 
- // if (!PeerInfo.isPeerInfo(peerInfo)) { - // throw errcode(new Error('peerInfo must be an instance of peer-info'), ERR_INVALID_PARAMETERS) - // } - - const id = peerInfo.id.toB58String() + onDisconnect (peerId, connection, error) { + if (!PeerId.isPeerId(peerId)) { + throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS) + } + + const id = peerId.toB58String() let storedConn = this.connections.get(id) if (storedConn && storedConn.length > 1) { @@ -117,26 +115,24 @@ class Registrar { this.connections.set(id, storedConn) } else if (storedConn) { for (const [, topology] of this.topologies) { - topology.disconnect(peerInfo, error) + topology.disconnect(peerId, error) } - this.connections.delete(peerInfo.id.toB58String()) + this.connections.delete(id) } } /** * Get a connection with a peer. - * @param {PeerInfo} peerInfo + * @param {PeerId} peerId * @returns {Connection} */ - getConnection (peerInfo) { - // TODO: This is not a `peer-info` instance anymore, but an object with the data. - // This can be modified to `peer-id` though, once `peer-info` is deprecated. - // if (!PeerInfo.isPeerInfo(peerInfo)) { - // throw errcode(new Error('peerInfo must be an instance of peer-info'), ERR_INVALID_PARAMETERS) - // } - - const connections = this.connections.get(peerInfo.id.toB58String()) + getConnection (peerId) { + if (!PeerId.isPeerId(peerId)) { + throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS) + } + + const connections = this.connections.get(peerId.toB58String()) // Return the first, open connection if (connections) { return connections.find(connection => connection.stat.status === 'open') diff --git a/test/content-routing/content-routing.node.js b/test/content-routing/content-routing.node.js index a8470da335..1fc7967dcd 100644 --- a/test/content-routing/content-routing.node.js +++ b/test/content-routing/content-routing.node.js @@ -56,7 +56,7 @@ describe('content-routing', () => { // Ring dial await Promise.all( - nodes.map((peer, i) => peer.dial(nodes[(i + 1) % number].peerInfo)) + nodes.map((peer, i) => peer.dial(nodes[(i + 1) % number].peerId)) ) }) @@ -96,9 +96,9 @@ describe('content-routing', () => { let delegate beforeEach(async () => { - const [peerInfo] = await peerUtils.createPeerInfo({ fixture: false }) + const [peerId] = await peerUtils.createPeerId({ fixture: false }) - delegate = new DelegatedContentRouter(peerInfo.id, { + delegate = new DelegatedContentRouter(peerId, { host: '0.0.0.0', protocol: 'http', port: 60197 @@ -227,9 +227,9 @@ describe('content-routing', () => { let delegate beforeEach(async () => { - const [peerInfo] = await peerUtils.createPeerInfo({ fixture: false }) + const [peerId] = await peerUtils.createPeerId({ fixture: false }) - delegate = new DelegatedContentRouter(peerInfo.id, { + delegate = new DelegatedContentRouter(peerId, { host: '0.0.0.0', protocol: 'http', port: 60197 diff --git a/test/content-routing/dht/configuration.node.js b/test/content-routing/dht/configuration.node.js index c0165cdd9f..d89f73b79b 100644 --- a/test/content-routing/dht/configuration.node.js +++ b/test/content-routing/dht/configuration.node.js @@ -32,11 +32,13 @@ describe('DHT subsystem is configurable', () => { }) it('should start and stop by default once libp2p starts', async () => { - const [peerInfo] = await peerUtils.createPeerInfo(1) - peerInfo.multiaddrs.add(listenAddr) + const [peerId] = await peerUtils.createPeerId(1) const customOptions = mergeOptions(subsystemOptions, { - peerInfo + peerId, + 
addresses: { + listen: [listenAddr] + } }) libp2p = await create(customOptions) @@ -50,11 +52,13 @@ describe('DHT subsystem is configurable', () => { }) it('should not start if disabled once libp2p starts', async () => { - const [peerInfo] = await peerUtils.createPeerInfo(1) - peerInfo.multiaddrs.add(listenAddr) + const [peerId] = await peerUtils.createPeerId(1) const customOptions = mergeOptions(subsystemOptions, { - peerInfo, + peerId, + addresses: { + listen: [listenAddr] + }, config: { dht: { enabled: false @@ -70,11 +74,13 @@ describe('DHT subsystem is configurable', () => { }) it('should allow a manual start', async () => { - const [peerInfo] = await peerUtils.createPeerInfo(1) - peerInfo.multiaddrs.add(listenAddr) + const [peerId] = await peerUtils.createPeerId(1) const customOptions = mergeOptions(subsystemOptions, { - peerInfo, + peerId, + addresses: { + listen: [listenAddr] + }, config: { dht: { enabled: false diff --git a/test/content-routing/dht/operation.node.js b/test/content-routing/dht/operation.node.js index 53643c3c3a..1b29c1cbc6 100644 --- a/test/content-routing/dht/operation.node.js +++ b/test/content-routing/dht/operation.node.js @@ -17,25 +17,28 @@ const listenAddr = multiaddr('/ip4/127.0.0.1/tcp/8000') const remoteListenAddr = multiaddr('/ip4/127.0.0.1/tcp/8001') describe('DHT subsystem operates correctly', () => { - let peerInfo, remotePeerInfo + let peerId, remotePeerId let libp2p, remoteLibp2p let remAddr beforeEach(async () => { - [peerInfo, remotePeerInfo] = await peerUtils.createPeerInfo({ number: 2 }) - - peerInfo.multiaddrs.add(listenAddr) - remotePeerInfo.multiaddrs.add(remoteListenAddr) + [peerId, remotePeerId] = await peerUtils.createPeerId({ number: 2 }) }) describe('dht started before connect', () => { beforeEach(async () => { libp2p = await create(mergeOptions(subsystemOptions, { - peerInfo + peerId, + addresses: { + listen: [listenAddr] + } })) remoteLibp2p = await create(mergeOptions(subsystemOptions, { - peerInfo: remotePeerInfo + peerId: remotePeerId, + addresses: { + listen: [remoteListenAddr] + } })) await Promise.all([ @@ -43,8 +46,8 @@ describe('DHT subsystem operates correctly', () => { remoteLibp2p.start() ]) - libp2p.peerStore.addressBook.set(remotePeerInfo.id, [remoteListenAddr]) - remAddr = libp2p.peerStore.addressBook.getMultiaddrsForPeer(remotePeerInfo.id)[0] + libp2p.peerStore.addressBook.set(remotePeerId, [remoteListenAddr]) + remAddr = libp2p.peerStore.addressBook.getMultiaddrsForPeer(remotePeerId)[0] }) afterEach(() => Promise.all([ @@ -83,11 +86,17 @@ describe('DHT subsystem operates correctly', () => { describe('dht started after connect', () => { beforeEach(async () => { libp2p = await create(mergeOptions(subsystemOptions, { - peerInfo + peerId, + addresses: { + listen: [listenAddr] + } })) remoteLibp2p = await create(mergeOptions(subsystemOptions, { - peerInfo: remotePeerInfo, + peerId: remotePeerId, + addresses: { + listen: [remoteListenAddr] + }, config: { dht: { enabled: false @@ -98,8 +107,8 @@ describe('DHT subsystem operates correctly', () => { await libp2p.start() await remoteLibp2p.start() - libp2p.peerStore.addressBook.set(remotePeerInfo.id, [remoteListenAddr]) - remAddr = libp2p.peerStore.addressBook.getMultiaddrsForPeer(remotePeerInfo.id)[0] + libp2p.peerStore.addressBook.set(remotePeerId, [remoteListenAddr]) + remAddr = libp2p.peerStore.addressBook.getMultiaddrsForPeer(remotePeerId)[0] }) afterEach(() => Promise.all([ diff --git a/test/core/listening.node.js b/test/core/listening.node.js index c51af16484..b54f481403 
100644 --- a/test/core/listening.node.js +++ b/test/core/listening.node.js @@ -14,12 +14,11 @@ const peerUtils = require('../utils/creators/peer') const listenAddr = multiaddr('/ip4/0.0.0.0/tcp/0') describe('Listening', () => { - let peerInfo + let peerId let libp2p before(async () => { - [peerInfo] = await peerUtils.createPeerInfo() - peerInfo.multiaddrs.add(listenAddr) + [peerId] = await peerUtils.createPeerId() }) after(async () => { @@ -28,7 +27,10 @@ describe('Listening', () => { it('should replace wildcard host and port with actual host and port on startup', async () => { libp2p = await create({ - peerInfo, + peerId, + addresses: { + listen: [listenAddr] + }, modules: { transport: [Transport] } @@ -36,7 +38,7 @@ describe('Listening', () => { await libp2p.start() - const addrs = libp2p.peerInfo.multiaddrs.toArray() + const addrs = libp2p.addresses.listen // Should get something like: // /ip4/127.0.0.1/tcp/50866 diff --git a/test/core/ping.node.js b/test/core/ping.node.js index bbfb908fb1..e5546a0a52 100644 --- a/test/core/ping.node.js +++ b/test/core/ping.node.js @@ -20,16 +20,19 @@ describe('ping', () => { number: 2, config: baseOptions }) + + nodes[0].peerStore.addressBook.set(nodes[1].peerId, nodes[1].addresses.listen) + nodes[1].peerStore.addressBook.set(nodes[0].peerId, nodes[0].addresses.listen) }) it('ping once from peer0 to peer1', async () => { - const latency = await nodes[0].ping(nodes[1].peerInfo) + const latency = await nodes[0].ping(nodes[1].peerId) expect(latency).to.be.a('Number') }) it('ping several times for getting an average', async () => { - const latencies = await pTimes(5, () => nodes[1].ping(nodes[0].peerInfo)) + const latencies = await pTimes(5, () => nodes[1].ping(nodes[0].peerId)) const averageLatency = latencies.reduce((p, c) => p + c, 0) / latencies.length expect(averageLatency).to.be.a('Number') @@ -66,7 +69,7 @@ describe('ping', () => { ) }) - const latency = await nodes[0].ping(nodes[1].peerInfo) + const latency = await nodes[0].ping(nodes[1].peerId) expect(latency).to.be.a('Number') }) diff --git a/test/dialing/direct.node.js b/test/dialing/direct.node.js index 734687d0a2..d7c9e6fa02 100644 --- a/test/dialing/direct.node.js +++ b/test/dialing/direct.node.js @@ -11,7 +11,6 @@ const Muxer = require('libp2p-mplex') const Crypto = require('libp2p-secio') const multiaddr = require('multiaddr') const PeerId = require('peer-id') -const PeerInfo = require('peer-info') const delay = require('delay') const pDefer = require('p-defer') const pSettle = require('p-settle') @@ -31,7 +30,7 @@ const swarmKeyBuffer = Buffer.from(require('../fixtures/swarm.key')) const mockUpgrader = require('../utils/mockUpgrader') const createMockConnection = require('../utils/mockConnection') const Peers = require('../fixtures/peers') -const { createPeerInfo } = require('../utils/creators/peer') +const { createPeerId } = require('../utils/creators/peer') const listenAddr = multiaddr('/ip4/127.0.0.1/tcp/0') const unsupportedAddr = multiaddr('/ip4/127.0.0.1/tcp/9999/ws/p2p/QmckxVrJw1Yo8LqvmDJNUmdAsKtSbiKWmrXJFyKmUraBoN') @@ -80,9 +79,7 @@ describe('Dialing (direct, TCP)', () => { it('should be able to connect to a remote node via its stringified multiaddr', async () => { const dialer = new Dialer({ transportManager: localTM, peerStore }) - - const dialable = Dialer.getDialable(remoteAddr.toString()) - const connection = await dialer.connectToPeer(dialable) + const connection = await dialer.connectToPeer(remoteAddr.toString()) expect(connection).to.exist() await connection.close() }) @@ 
-95,24 +92,6 @@ describe('Dialing (direct, TCP)', () => { .and.to.have.nested.property('._errors[0].code', ErrorCodes.ERR_TRANSPORT_UNAVAILABLE) }) - it('should be able to connect to a given peer info', async () => { - const dialer = new Dialer({ - transportManager: localTM, - peerStore: { - addressBook: { - add: () => {}, - getMultiaddrsForPeer: () => [remoteAddr] - } - } - }) - const peerId = await PeerId.createFromJSON(Peers[0]) - const peerInfo = new PeerInfo(peerId) - - const connection = await dialer.connectToPeer(peerInfo) - expect(connection).to.exist() - await connection.close() - }) - it('should be able to connect to a given peer id', async () => { const peerStore = new PeerStore() const dialer = new Dialer({ @@ -121,11 +100,9 @@ describe('Dialing (direct, TCP)', () => { }) const peerId = await PeerId.createFromJSON(Peers[0]) - const peerInfo = new PeerInfo(peerId) - peerInfo.multiaddrs.add(remoteAddr) - peerStore.addressBook.set(peerInfo.id, peerInfo.multiaddrs.toArray()) + peerStore.addressBook.set(peerId, [remoteAddr]) - const connection = await dialer.connectToPeer(peerInfo) + const connection = await dialer.connectToPeer(peerId) expect(connection).to.exist() await connection.close() }) @@ -141,9 +118,8 @@ describe('Dialing (direct, TCP)', () => { } }) const peerId = await PeerId.createFromJSON(Peers[0]) - const peerInfo = new PeerInfo(peerId) - await expect(dialer.connectToPeer(peerInfo)) + await expect(dialer.connectToPeer(peerId)) .to.eventually.be.rejectedWith(AggregateError) .and.to.have.nested.property('._errors[0].code', ErrorCodes.ERR_TRANSPORT_UNAVAILABLE) }) @@ -190,10 +166,10 @@ describe('Dialing (direct, TCP)', () => { const deferredDial = pDefer() sinon.stub(localTM, 'dial').callsFake(() => deferredDial.promise) - const [peerInfo] = await createPeerInfo() + const [peerId] = await createPeerId() // Perform 3 multiaddr dials - dialer.connectToPeer(peerInfo) + dialer.connectToPeer(peerId) // Let the call stack run await delay(0) @@ -212,30 +188,28 @@ describe('Dialing (direct, TCP)', () => { }) describe('libp2p.dialer', () => { - let peerInfo - let remotePeerInfo + let peerId, remotePeerId let libp2p let remoteLibp2p let remoteAddr before(async () => { - const [peerId, remotePeerId] = await Promise.all([ + [peerId, remotePeerId] = await Promise.all([ PeerId.createFromJSON(Peers[0]), PeerId.createFromJSON(Peers[1]) ]) - peerInfo = new PeerInfo(peerId) - remotePeerInfo = new PeerInfo(remotePeerId) - remoteLibp2p = new Libp2p({ - peerInfo: remotePeerInfo, + peerId: remotePeerId, + addresses: { + listen: [listenAddr] + }, modules: { transport: [Transport], streamMuxer: [Muxer], connEncryption: [Crypto] } }) - remoteLibp2p.peerInfo.multiaddrs.add(listenAddr) remoteLibp2p.handle('/echo/1.0.0', ({ stream }) => pipe(stream, stream)) await remoteLibp2p.start() @@ -252,7 +226,7 @@ describe('Dialing (direct, TCP)', () => { it('should fail if no peer id is provided', async () => { libp2p = new Libp2p({ - peerInfo, + peerId, modules: { transport: [Transport], streamMuxer: [Muxer], @@ -265,7 +239,7 @@ describe('Dialing (direct, TCP)', () => { try { await libp2p.dial(remoteLibp2p.transportManager.getAddrs()[0]) } catch (err) { - expect(err).to.have.property('code', ErrorCodes.ERR_INVALID_PEER) + expect(err).to.have.property('code', ErrorCodes.ERR_INVALID_MULTIADDR) return } @@ -274,7 +248,7 @@ describe('Dialing (direct, TCP)', () => { it('should use the dialer for connecting to a multiaddr', async () => { libp2p = new Libp2p({ - peerInfo, + peerId, modules: { transport: 
[Transport], streamMuxer: [Muxer], @@ -295,7 +269,7 @@ describe('Dialing (direct, TCP)', () => { it('should use the dialer for connecting to a peer', async () => { libp2p = new Libp2p({ - peerInfo, + peerId, modules: { transport: [Transport], streamMuxer: [Muxer], @@ -304,8 +278,9 @@ describe('Dialing (direct, TCP)', () => { }) sinon.spy(libp2p.dialer, 'connectToPeer') + libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.addresses.listen) - const connection = await libp2p.dial(remotePeerInfo) + const connection = await libp2p.dial(remotePeerId) expect(connection).to.exist() const { stream, protocol } = await connection.newStream('/echo/1.0.0') expect(stream).to.exist() @@ -316,7 +291,7 @@ describe('Dialing (direct, TCP)', () => { it('should be able to use hangup to close connections', async () => { libp2p = new Libp2p({ - peerInfo, + peerId, modules: { transport: [Transport], streamMuxer: [Muxer], @@ -333,7 +308,7 @@ describe('Dialing (direct, TCP)', () => { it('should be able to use hangup by address string to close connections', async () => { libp2p = new Libp2p({ - peerInfo, + peerId, modules: { transport: [Transport], streamMuxer: [Muxer], @@ -351,7 +326,7 @@ describe('Dialing (direct, TCP)', () => { it('should use the protectors when provided for connecting', async () => { const protector = new Protector(swarmKeyBuffer) libp2p = new Libp2p({ - peerInfo, + peerId, modules: { transport: [Transport], streamMuxer: [Muxer], @@ -374,7 +349,7 @@ describe('Dialing (direct, TCP)', () => { it('should coalesce parallel dials to the same peer (id in multiaddr)', async () => { libp2p = new Libp2p({ - peerInfo, + peerId, modules: { transport: [Transport], streamMuxer: [Muxer], @@ -383,9 +358,11 @@ describe('Dialing (direct, TCP)', () => { }) const dials = 10 - const fullAddress = remoteAddr.encapsulate(`/p2p/${remoteLibp2p.peerInfo.id.toB58String()}`) + const fullAddress = remoteAddr.encapsulate(`/p2p/${remoteLibp2p.peerId.toB58String()}`) + + libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.addresses.listen) const dialResults = await Promise.all([...new Array(dials)].map((_, index) => { - if (index % 2 === 0) return libp2p.dial(remoteLibp2p.peerInfo) + if (index % 2 === 0) return libp2p.dial(remoteLibp2p.peerId) return libp2p.dial(fullAddress) })) @@ -402,7 +379,7 @@ describe('Dialing (direct, TCP)', () => { it('should coalesce parallel dials to the same error on failure', async () => { libp2p = new Libp2p({ - peerInfo, + peerId, modules: { transport: [Transport], streamMuxer: [Muxer], @@ -413,8 +390,9 @@ describe('Dialing (direct, TCP)', () => { const error = new Error('Boom') sinon.stub(libp2p.transportManager, 'dial').callsFake(() => Promise.reject(error)) + libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.addresses.listen) const dialResults = await pSettle([...new Array(dials)].map((_, index) => { - if (index % 2 === 0) return libp2p.dial(remoteLibp2p.peerInfo) + if (index % 2 === 0) return libp2p.dial(remoteLibp2p.peerId) return libp2p.dial(remoteAddr) })) diff --git a/test/dialing/direct.spec.js b/test/dialing/direct.spec.js index 3d4041120b..3ac71a0313 100644 --- a/test/dialing/direct.spec.js +++ b/test/dialing/direct.spec.js @@ -14,7 +14,6 @@ const Muxer = require('libp2p-mplex') const Crypto = require('libp2p-secio') const multiaddr = require('multiaddr') const PeerId = require('peer-id') -const PeerInfo = require('peer-info') const AggregateError = require('aggregate-error') const { AbortError } = require('libp2p-interfaces/src/transport/errors') @@ -267,13 
+266,12 @@ describe('Dialing (direct, WebSockets)', () => { }) describe('libp2p.dialer', () => { - let peerInfo + let peerId let libp2p let remoteLibp2p before(async () => { - const peerId = await PeerId.createFromJSON(Peers[0]) - peerInfo = new PeerInfo(peerId) + peerId = await PeerId.createFromJSON(Peers[0]) }) afterEach(async () => { @@ -288,7 +286,7 @@ describe('Dialing (direct, WebSockets)', () => { it('should create a dialer', () => { libp2p = new Libp2p({ - peerInfo, + peerId, modules: { transport: [Transport], streamMuxer: [Muxer], @@ -306,7 +304,7 @@ describe('Dialing (direct, WebSockets)', () => { it('should be able to override dialer options', async () => { const config = { - peerInfo, + peerId, modules: { transport: [Transport], streamMuxer: [Muxer], @@ -328,7 +326,7 @@ describe('Dialing (direct, WebSockets)', () => { it('should use the dialer for connecting', async () => { libp2p = new Libp2p({ - peerInfo, + peerId, modules: { transport: [Transport], streamMuxer: [Muxer], @@ -351,7 +349,7 @@ describe('Dialing (direct, WebSockets)', () => { it('should run identify automatically after connecting', async () => { libp2p = new Libp2p({ - peerInfo, + peerId, modules: { transport: [Transport], streamMuxer: [Muxer], @@ -380,7 +378,7 @@ describe('Dialing (direct, WebSockets)', () => { it('should be able to use hangup to close connections', async () => { libp2p = new Libp2p({ - peerInfo, + peerId, modules: { transport: [Transport], streamMuxer: [Muxer], @@ -397,7 +395,7 @@ describe('Dialing (direct, WebSockets)', () => { it('should abort pending dials on stop', async () => { libp2p = new Libp2p({ - peerInfo, + peerId, modules: { transport: [Transport], streamMuxer: [Muxer], diff --git a/test/dialing/relay.node.js b/test/dialing/relay.node.js index 5fd0d8fca1..57a5e8ac88 100644 --- a/test/dialing/relay.node.js +++ b/test/dialing/relay.node.js @@ -5,7 +5,6 @@ const chai = require('chai') chai.use(require('dirty-chai')) chai.use(require('chai-as-promised')) const { expect } = chai -const sinon = require('sinon') const multiaddr = require('multiaddr') const { collect } = require('streaming-iterables') @@ -13,25 +12,30 @@ const pipe = require('it-pipe') const AggregateError = require('aggregate-error') const PeerId = require('peer-id') -const { createPeerInfo } = require('../utils/creators/peer') +const { createPeerId } = require('../utils/creators/peer') const baseOptions = require('../utils/base-options') const Libp2p = require('../../src') const { codes: Errors } = require('../../src/errors') +const listenAddr = multiaddr('/ip4/0.0.0.0/tcp/0') + describe('Dialing (via relay, TCP)', () => { let srcLibp2p let relayLibp2p let dstLibp2p before(async () => { - const peerInfos = await createPeerInfo({ number: 3 }) + const peerIds = await createPeerId({ number: 3 }) // Create 3 nodes, and turn HOP on for the relay - ;[srcLibp2p, relayLibp2p, dstLibp2p] = peerInfos.map((peerInfo, index) => { + ;[srcLibp2p, relayLibp2p, dstLibp2p] = peerIds.map((peerId, index) => { const opts = baseOptions index === 1 && (opts.config.relay.hop.enabled = true) return new Libp2p({ ...opts, - peerInfo + addresses: { + listen: [listenAddr] + }, + peerId }) }) @@ -40,12 +44,7 @@ describe('Dialing (via relay, TCP)', () => { beforeEach(() => { // Start each node - return Promise.all([srcLibp2p, relayLibp2p, dstLibp2p].map(libp2p => { - // Reset multiaddrs and start - libp2p.peerInfo.multiaddrs.clear() - libp2p.peerInfo.multiaddrs.add('/ip4/0.0.0.0/tcp/0') - return libp2p.start() - })) + return Promise.all([srcLibp2p, 
relayLibp2p, dstLibp2p].map(libp2p => libp2p.start())) }) afterEach(() => { @@ -62,11 +61,11 @@ describe('Dialing (via relay, TCP)', () => { it('should be able to connect to a peer over a relay with active connections', async () => { const relayAddr = relayLibp2p.transportManager.getAddrs()[0] - const relayIdString = relayLibp2p.peerInfo.id.toB58String() + const relayIdString = relayLibp2p.peerId.toB58String() const dialAddr = relayAddr .encapsulate(`/p2p/${relayIdString}`) - .encapsulate(`/p2p-circuit/p2p/${dstLibp2p.peerInfo.id.toB58String()}`) + .encapsulate(`/p2p-circuit/p2p/${dstLibp2p.peerId.toB58String()}`) const tcpAddrs = dstLibp2p.transportManager.getAddrs() await dstLibp2p.transportManager.listen([multiaddr(`/p2p-circuit${relayAddr}/p2p/${relayIdString}`)]) @@ -74,14 +73,14 @@ describe('Dialing (via relay, TCP)', () => { const connection = await srcLibp2p.dial(dialAddr) expect(connection).to.exist() - expect(connection.remotePeer.toBytes()).to.eql(dstLibp2p.peerInfo.id.toBytes()) - expect(connection.localPeer.toBytes()).to.eql(srcLibp2p.peerInfo.id.toBytes()) + expect(connection.remotePeer.toBytes()).to.eql(dstLibp2p.peerId.toBytes()) + expect(connection.localPeer.toBytes()).to.eql(srcLibp2p.peerId.toBytes()) expect(connection.remoteAddr).to.eql(dialAddr) expect(connection.localAddr).to.eql( relayAddr // the relay address .encapsulate(`/p2p/${relayIdString}`) // with its peer id .encapsulate('/p2p-circuit') // the local peer is connected over the relay - .encapsulate(`/p2p/${srcLibp2p.peerInfo.id.toB58String()}`) // and the local peer id + .encapsulate(`/p2p/${srcLibp2p.peerId.toB58String()}`) // and the local peer id ) const { stream: echoStream } = await connection.newStream('/echo/1.0.0') @@ -97,11 +96,11 @@ describe('Dialing (via relay, TCP)', () => { it('should fail to connect to a peer over a relay with inactive connections', async () => { const relayAddr = relayLibp2p.transportManager.getAddrs()[0] - const relayIdString = relayLibp2p.peerInfo.id.toB58String() + const relayIdString = relayLibp2p.peerId.toB58String() const dialAddr = relayAddr .encapsulate(`/p2p/${relayIdString}`) - .encapsulate(`/p2p-circuit/p2p/${dstLibp2p.peerInfo.id.toB58String()}`) + .encapsulate(`/p2p-circuit/p2p/${dstLibp2p.peerId.toB58String()}`) await expect(srcLibp2p.dial(dialAddr)) .to.eventually.be.rejectedWith(AggregateError) @@ -110,27 +109,27 @@ describe('Dialing (via relay, TCP)', () => { it('should not stay connected to a relay when not already connected and HOP fails', async () => { const relayAddr = relayLibp2p.transportManager.getAddrs()[0] - const relayIdString = relayLibp2p.peerInfo.id.toB58String() + const relayIdString = relayLibp2p.peerId.toB58String() const dialAddr = relayAddr .encapsulate(`/p2p/${relayIdString}`) - .encapsulate(`/p2p-circuit/p2p/${dstLibp2p.peerInfo.id.toB58String()}`) + .encapsulate(`/p2p-circuit/p2p/${dstLibp2p.peerId.toB58String()}`) await expect(srcLibp2p.dial(dialAddr)) .to.eventually.be.rejectedWith(AggregateError) .and.to.have.nested.property('._errors[0].code', Errors.ERR_HOP_REQUEST_FAILED) // We should not be connected to the relay, because we weren't before the dial - const srcToRelayConn = srcLibp2p.registrar.getConnection(relayLibp2p.peerInfo) + const srcToRelayConn = srcLibp2p.registrar.getConnection(relayLibp2p.peerId) expect(srcToRelayConn).to.not.exist() }) it('dialer should stay connected to an already connected relay on hop failure', async () => { - const relayIdString = relayLibp2p.peerInfo.id.toB58String() + const relayIdString = 
relayLibp2p.peerId.toB58String() const relayAddr = relayLibp2p.transportManager.getAddrs()[0].encapsulate(`/p2p/${relayIdString}`) const dialAddr = relayAddr - .encapsulate(`/p2p-circuit/p2p/${dstLibp2p.peerInfo.id.toB58String()}`) + .encapsulate(`/p2p-circuit/p2p/${dstLibp2p.peerId.toB58String()}`) await srcLibp2p.dial(relayAddr) @@ -138,17 +137,17 @@ describe('Dialing (via relay, TCP)', () => { .to.eventually.be.rejectedWith(AggregateError) .and.to.have.nested.property('._errors[0].code', Errors.ERR_HOP_REQUEST_FAILED) - const srcToRelayConn = srcLibp2p.registrar.getConnection(relayLibp2p.peerInfo) + const srcToRelayConn = srcLibp2p.registrar.getConnection(relayLibp2p.peerId) expect(srcToRelayConn).to.exist() expect(srcToRelayConn.stat.status).to.equal('open') }) it('destination peer should stay connected to an already connected relay on hop failure', async () => { - const relayIdString = relayLibp2p.peerInfo.id.toB58String() + const relayIdString = relayLibp2p.peerId.toB58String() const relayAddr = relayLibp2p.transportManager.getAddrs()[0].encapsulate(`/p2p/${relayIdString}`) const dialAddr = relayAddr - .encapsulate(`/p2p-circuit/p2p/${dstLibp2p.peerInfo.id.toB58String()}`) + .encapsulate(`/p2p-circuit/p2p/${dstLibp2p.peerId.toB58String()}`) // Connect the destination peer and the relay const tcpAddrs = dstLibp2p.transportManager.getAddrs() @@ -156,15 +155,15 @@ describe('Dialing (via relay, TCP)', () => { expect(dstLibp2p.transportManager.getAddrs()).to.have.deep.members([...tcpAddrs, dialAddr.decapsulate('p2p')]) // Tamper with the our multiaddrs for the circuit message - sinon.stub(srcLibp2p.peerInfo.multiaddrs, 'toArray').returns([{ + srcLibp2p.addresses.listen = [{ buffer: Buffer.from('an invalid multiaddr') - }]) + }] await expect(srcLibp2p.dial(dialAddr)) .to.eventually.be.rejectedWith(AggregateError) .and.to.have.nested.property('._errors[0].code', Errors.ERR_HOP_REQUEST_FAILED) - const dstToRelayConn = dstLibp2p.registrar.getConnection(relayLibp2p.peerInfo) + const dstToRelayConn = dstLibp2p.registrar.getConnection(relayLibp2p.peerId) expect(dstToRelayConn).to.exist() expect(dstToRelayConn.stat.status).to.equal('open') }) diff --git a/test/identify/index.spec.js b/test/identify/index.spec.js index 7af2f67c84..648000a147 100644 --- a/test/identify/index.spec.js +++ b/test/identify/index.spec.js @@ -9,7 +9,6 @@ const sinon = require('sinon') const delay = require('delay') const PeerId = require('peer-id') -const PeerInfo = require('peer-info') const duplexPair = require('it-pair/duplex') const multiaddr = require('multiaddr') const pWaitFor = require('p-wait-for') @@ -35,7 +34,7 @@ describe('Identify', () => { [localPeer, remotePeer] = (await Promise.all([ PeerId.createFromJSON(Peers[0]), PeerId.createFromJSON(Peers[1]) - ])).map(id => new PeerInfo(id)) + ])) }) afterEach(() => { @@ -44,7 +43,10 @@ describe('Identify', () => { it('should be able to identify another peer', async () => { const localIdentify = new IdentifyService({ - peerInfo: localPeer, + peerId: localPeer, + addresses: { + listen: [] + }, protocols, registrar: { peerStore: { @@ -58,12 +60,15 @@ describe('Identify', () => { } }) const remoteIdentify = new IdentifyService({ - peerInfo: remotePeer, + peerId: remotePeer, + addresses: { + listen: [] + }, protocols }) const observedAddr = multiaddr('/ip4/127.0.0.1/tcp/1234') - const localConnectionMock = { newStream: () => {}, remotePeer: remotePeer.id } + const localConnectionMock = { newStream: () => {}, remotePeer } const remoteConnectionMock = { remoteAddr: 
observedAddr } const [local, remote] = duplexPair() @@ -86,12 +91,15 @@ describe('Identify', () => { expect(localIdentify.registrar.peerStore.protoBook.set.callCount).to.equal(1) // Validate the remote peer gets updated in the peer store const call = localIdentify.registrar.peerStore.addressBook.set.firstCall - expect(call.args[0].id.bytes).to.equal(remotePeer.id.bytes) + expect(call.args[0].id.bytes).to.equal(remotePeer.bytes) }) it('should throw if identified peer is the wrong peer', async () => { const localIdentify = new IdentifyService({ - peerInfo: localPeer, + peerId: localPeer, + addresses: { + listen: [] + }, protocols, registrar: { peerStore: { @@ -105,12 +113,15 @@ describe('Identify', () => { } }) const remoteIdentify = new IdentifyService({ - peerInfo: remotePeer, + peerId: remotePeer, + addresses: { + listen: [] + }, protocols }) const observedAddr = multiaddr('/ip4/127.0.0.1/tcp/1234') - const localConnectionMock = { newStream: () => {}, remotePeer: localPeer.id } + const localConnectionMock = { newStream: () => {}, remotePeer: localPeer } const remoteConnectionMock = { remoteAddr: observedAddr } const [local, remote] = duplexPair() @@ -118,7 +129,7 @@ describe('Identify', () => { // Run identify const identifyPromise = Promise.all([ - localIdentify.identify(localConnectionMock, localPeer.id), + localIdentify.identify(localConnectionMock, localPeer), remoteIdentify.handleMessage({ connection: remoteConnectionMock, stream: remote, @@ -133,8 +144,12 @@ describe('Identify', () => { describe('push', () => { it('should be able to push identify updates to another peer', async () => { + const listeningAddr = multiaddr('/ip4/127.0.0.1/tcp/1234') const localIdentify = new IdentifyService({ - peerInfo: localPeer, + peerId: localPeer, + addresses: { + listen: [listeningAddr] + }, registrar: { getConnection: () => {} }, protocols: new Map([ [multicodecs.IDENTIFY], @@ -143,7 +158,10 @@ describe('Identify', () => { ]) }) const remoteIdentify = new IdentifyService({ - peerInfo: remotePeer, + peerId: remotePeer, + addresses: { + listen: [] + }, registrar: { peerStore: { addressBook: { @@ -158,13 +176,8 @@ describe('Identify', () => { // Setup peer protocols and multiaddrs const localProtocols = new Set([multicodecs.IDENTIFY, multicodecs.IDENTIFY_PUSH, '/echo/1.0.0']) - const listeningAddr = multiaddr('/ip4/127.0.0.1/tcp/1234') - sinon.stub(localPeer.multiaddrs, 'toArray').returns([listeningAddr]) - sinon.stub(localPeer, 'protocols').value(localProtocols) - sinon.stub(remotePeer, 'protocols').value(new Set([multicodecs.IDENTIFY, multicodecs.IDENTIFY_PUSH])) - const localConnectionMock = { newStream: () => {} } - const remoteConnectionMock = { remotePeer: localPeer.id } + const remoteConnectionMock = { remotePeer: localPeer } const [local, remote] = duplexPair() sinon.stub(localConnectionMock, 'newStream').returns({ stream: local, protocol: multicodecs.IDENTIFY_PUSH }) @@ -185,22 +198,21 @@ describe('Identify', () => { expect(remoteIdentify.registrar.peerStore.addressBook.set.callCount).to.equal(1) expect(remoteIdentify.registrar.peerStore.protoBook.set.callCount).to.equal(1) const [peerId, multiaddrs] = remoteIdentify.registrar.peerStore.addressBook.set.firstCall.args - expect(peerId.bytes).to.eql(localPeer.id.bytes) + expect(peerId.bytes).to.eql(localPeer.bytes) expect(multiaddrs).to.eql([listeningAddr]) const [peerId2, protocols] = remoteIdentify.registrar.peerStore.protoBook.set.firstCall.args - expect(peerId2.bytes).to.eql(localPeer.id.bytes) + 
expect(peerId2.bytes).to.eql(localPeer.bytes) expect(protocols).to.eql(Array.from(localProtocols)) }) }) describe('libp2p.dialer.identifyService', () => { - let peerInfo + let peerId let libp2p let remoteLibp2p before(async () => { - const peerId = await PeerId.createFromJSON(Peers[0]) - peerInfo = new PeerInfo(peerId) + peerId = await PeerId.createFromJSON(Peers[0]) }) afterEach(async () => { @@ -216,7 +228,7 @@ describe('Identify', () => { it('should run identify automatically after connecting', async () => { libp2p = new Libp2p({ ...baseOptions, - peerInfo + peerId }) sinon.spy(libp2p.identifyService, 'identify') @@ -239,7 +251,7 @@ describe('Identify', () => { it('should push protocol updates to an already connected peer', async () => { libp2p = new Libp2p({ ...baseOptions, - peerInfo + peerId }) sinon.spy(libp2p.identifyService, 'identify') diff --git a/test/metrics/index.node.js b/test/metrics/index.node.js index f3fa8c7c87..4387bf16e7 100644 --- a/test/metrics/index.node.js +++ b/test/metrics/index.node.js @@ -74,7 +74,7 @@ describe('libp2p.metrics', () => { remoteLibp2p.handle('/echo/1.0.0', ({ stream }) => pipe(stream, stream)) - const connection = await libp2p.dial(remoteLibp2p.peerInfo) + const connection = await libp2p.dial(remoteLibp2p.peerId) const { stream } = await connection.newStream('/echo/1.0.0') const bytes = randomBytes(512) @@ -109,6 +109,11 @@ describe('libp2p.metrics', () => { enabled: true, computeThrottleMaxQueueSize: 1, // compute after every message movingAverageIntervals: [10] + }, + config: { + peerDiscovery: { + autoDial: false + } } } let remoteLibp2p @@ -116,7 +121,7 @@ describe('libp2p.metrics', () => { remoteLibp2p.handle('/echo/1.0.0', ({ stream }) => pipe(stream, stream)) - const connection = await libp2p.dial(remoteLibp2p.peerInfo) + const connection = await libp2p.dial(remoteLibp2p.peerId) const { stream } = await connection.newStream('/echo/1.0.0') const bytes = randomBytes(512) diff --git a/test/peer-discovery/index.node.js b/test/peer-discovery/index.node.js index 2f315e29b9..1784c63d32 100644 --- a/test/peer-discovery/index.node.js +++ b/test/peer-discovery/index.node.js @@ -15,18 +15,16 @@ const multiaddr = require('multiaddr') const Libp2p = require('../../src') const baseOptions = require('../utils/base-options') -const { createPeerInfo } = require('../utils/creators/peer') +const { createPeerId } = require('../utils/creators/peer') + +const listenAddr = multiaddr('/ip4/127.0.0.1/tcp/0') describe('peer discovery scenarios', () => { - let peerInfo, remotePeerInfo1, remotePeerInfo2 + let peerId, remotePeerId1, remotePeerId2 let libp2p before(async () => { - [peerInfo, remotePeerInfo1, remotePeerInfo2] = await createPeerInfo({ number: 3 }) - - peerInfo.multiaddrs.add(multiaddr('/ip4/127.0.0.1/tcp/0')) - remotePeerInfo1.multiaddrs.add(multiaddr('/ip4/127.0.0.1/tcp/0')) - remotePeerInfo2.multiaddrs.add(multiaddr('/ip4/127.0.0.1/tcp/0')) + [peerId, remotePeerId1, remotePeerId2] = await createPeerId({ number: 3 }) }) afterEach(async () => { @@ -37,12 +35,15 @@ describe('peer discovery scenarios', () => { const deferred = defer() const bootstrappers = [ - ...remotePeerInfo1.multiaddrs.toArray().map((ma) => `${ma}/p2p/${remotePeerInfo1.id.toB58String()}`), - ...remotePeerInfo2.multiaddrs.toArray().map((ma) => `${ma}/p2p/${remotePeerInfo2.id.toB58String()}`) + `${listenAddr}/p2p/${remotePeerId1.toB58String()}`, + `${listenAddr}/p2p/${remotePeerId2.toB58String()}` ] libp2p = new Libp2p(mergeOptions(baseOptions, { - peerInfo, + peerId, + addresses: { + 
listen: [listenAddr] + }, modules: { peerDiscovery: [Bootstrap] }, @@ -58,12 +59,12 @@ describe('peer discovery scenarios', () => { })) const expectedPeers = new Set([ - remotePeerInfo1.id.toB58String(), - remotePeerInfo2.id.toB58String() + remotePeerId1.toB58String(), + remotePeerId2.toB58String() ]) - libp2p.on('peer:discovery', (peerInfo) => { - expectedPeers.delete(peerInfo.id.toB58String()) + libp2p.on('peer:discovery', (peerId) => { + expectedPeers.delete(peerId.toB58String()) if (expectedPeers.size === 0) { libp2p.removeAllListeners('peer:discovery') deferred.resolve() @@ -78,8 +79,11 @@ describe('peer discovery scenarios', () => { it('MulticastDNS should discover all peers on the local network', async () => { const deferred = defer() - const getConfig = (peerInfo) => mergeOptions(baseOptions, { - peerInfo, + const getConfig = (peerId) => mergeOptions(baseOptions, { + peerId, + addresses: { + listen: [listenAddr] + }, modules: { peerDiscovery: [MulticastDNS] }, @@ -96,17 +100,17 @@ describe('peer discovery scenarios', () => { } }) - libp2p = new Libp2p(getConfig(peerInfo)) - const remoteLibp2p1 = new Libp2p(getConfig(remotePeerInfo1)) - const remoteLibp2p2 = new Libp2p(getConfig(remotePeerInfo2)) + libp2p = new Libp2p(getConfig(peerId)) + const remoteLibp2p1 = new Libp2p(getConfig(remotePeerId1)) + const remoteLibp2p2 = new Libp2p(getConfig(remotePeerId2)) const expectedPeers = new Set([ - remotePeerInfo1.id.toB58String(), - remotePeerInfo2.id.toB58String() + remotePeerId1.toB58String(), + remotePeerId2.toB58String() ]) - libp2p.on('peer:discovery', (peerInfo) => { - expectedPeers.delete(peerInfo.id.toB58String()) + libp2p.on('peer:discovery', (peerId) => { + expectedPeers.delete(peerId.toB58String()) if (expectedPeers.size === 0) { libp2p.removeAllListeners('peer:discovery') deferred.resolve() @@ -128,8 +132,11 @@ describe('peer discovery scenarios', () => { it('kad-dht should discover other peers', async () => { const deferred = defer() - const getConfig = (peerInfo) => mergeOptions(baseOptions, { - peerInfo, + const getConfig = (peerId) => mergeOptions(baseOptions, { + peerId, + addresses: { + listen: [listenAddr] + }, modules: { dht: KadDht }, @@ -149,16 +156,16 @@ describe('peer discovery scenarios', () => { } }) - const localConfig = getConfig(peerInfo) + const localConfig = getConfig(peerId) // Only run random walk on our local node localConfig.config.dht.randomWalk.enabled = true libp2p = new Libp2p(localConfig) - const remoteLibp2p1 = new Libp2p(getConfig(remotePeerInfo1)) - const remoteLibp2p2 = new Libp2p(getConfig(remotePeerInfo2)) + const remoteLibp2p1 = new Libp2p(getConfig(remotePeerId1)) + const remoteLibp2p2 = new Libp2p(getConfig(remotePeerId2)) - libp2p.on('peer:discovery', (peerInfo) => { - if (peerInfo.id.toB58String() === remotePeerInfo2.id.toB58String()) { + libp2p.on('peer:discovery', (peerId) => { + if (peerId.toB58String() === remotePeerId1.toB58String()) { libp2p.removeAllListeners('peer:discovery') deferred.resolve() } @@ -170,12 +177,15 @@ describe('peer discovery scenarios', () => { remoteLibp2p2.start() ]) + libp2p.peerStore.addressBook.set(remotePeerId1, remoteLibp2p1.addresses.listen) + remoteLibp2p2.peerStore.addressBook.set(remotePeerId1, remoteLibp2p1.addresses.listen) + // Topology: // A -> B // C -> B await Promise.all([ - libp2p.dial(remotePeerInfo1), - remoteLibp2p2.dial(remotePeerInfo1) + libp2p.dial(remotePeerId1), + remoteLibp2p2.dial(remotePeerId1) ]) await deferred.promise diff --git a/test/peer-discovery/index.spec.js 
b/test/peer-discovery/index.spec.js index 518cd68f71..574143fbc1 100644 --- a/test/peer-discovery/index.spec.js +++ b/test/peer-discovery/index.spec.js @@ -9,21 +9,22 @@ const sinon = require('sinon') const defer = require('p-defer') const mergeOptions = require('merge-options') +const multiaddr = require('multiaddr') const WebRTCStar = require('libp2p-webrtc-star') const Libp2p = require('../../src') const baseOptions = require('../utils/base-options.browser') -const { createPeerInfo } = require('../utils/creators/peer') +const { createPeerId } = require('../utils/creators/peer') const { EventEmitter } = require('events') describe('peer discovery', () => { describe('basic functions', () => { - let peerInfo - let remotePeerInfo + let peerId + let remotePeerId let libp2p before(async () => { - [peerInfo, remotePeerInfo] = await createPeerInfo({ number: 2 }) + [peerId, remotePeerId] = await createPeerId({ number: 2 }) }) afterEach(async () => { @@ -34,14 +35,14 @@ describe('peer discovery', () => { it('should dial know peers on startup', async () => { libp2p = new Libp2p({ ...baseOptions, - peerInfo + peerId }) - libp2p.peerStore.addressBook.set(remotePeerInfo.id, remotePeerInfo.multiaddrs.toArray()) - libp2p.peerStore.protoBook.set(remotePeerInfo.id, Array.from(remotePeerInfo.protocols)) + + libp2p.peerStore.addressBook.set(remotePeerId, [multiaddr('/ip4/165.1.1.1/tcp/80')]) const deferred = defer() - sinon.stub(libp2p.dialer, 'connectToPeer').callsFake((remotePeerInfo) => { - expect(remotePeerInfo).to.equal(remotePeerInfo) + sinon.stub(libp2p.dialer, 'connectToPeer').callsFake((remotePeerId) => { + expect(remotePeerId).to.equal(remotePeerId) deferred.resolve() }) const spy = sinon.spy() @@ -51,7 +52,7 @@ describe('peer discovery', () => { await deferred.promise expect(spy.calledOnce).to.eql(true) - expect(spy.getCall(0).args[0].id.toString()).to.eql(remotePeerInfo.id.toString()) + expect(spy.getCall(0).args[0].toString()).to.eql(remotePeerId.toString()) }) it('should ignore self on discovery', async () => { @@ -61,7 +62,7 @@ describe('peer discovery', () => { mockDiscovery.stop = () => {} libp2p = new Libp2p(mergeOptions(baseOptions, { - peerInfo, + peerId, modules: { peerDiscovery: [mockDiscovery] } @@ -70,7 +71,7 @@ describe('peer discovery', () => { await libp2p.start() const discoverySpy = sinon.spy() libp2p.on('peer:discovery', discoverySpy) - libp2p._discovery.get('mock').emit('peer', libp2p.peerInfo) + libp2p._discovery.get('mock').emit('peer', { id: libp2p.peerId }) expect(discoverySpy.called).to.eql(false) }) @@ -87,7 +88,7 @@ describe('peer discovery', () => { const stopSpy = sinon.spy(mockDiscovery, 'stop') libp2p = new Libp2p(mergeOptions(baseOptions, { - peerInfo, + peerId, modules: { peerDiscovery: [mockDiscovery] } @@ -103,15 +104,15 @@ describe('peer discovery', () => { }) describe('discovery modules from transports', () => { - let peerInfo, libp2p + let peerId, libp2p before(async () => { - [peerInfo] = await createPeerInfo() + [peerId] = await createPeerId() }) it('should add discovery module if present in transports and enabled', async () => { libp2p = new Libp2p(mergeOptions(baseOptions, { - peerInfo, + peerId, modules: { transport: [WebRTCStar] }, @@ -132,7 +133,7 @@ describe('peer discovery', () => { it('should not add discovery module if present in transports but disabled', async () => { libp2p = new Libp2p(mergeOptions(baseOptions, { - peerInfo, + peerId, modules: { transport: [WebRTCStar] }, diff --git a/test/peer-routing/peer-routing.node.js 
b/test/peer-routing/peer-routing.node.js index 54e2a2785c..460ff9c12d 100644 --- a/test/peer-routing/peer-routing.node.js +++ b/test/peer-routing/peer-routing.node.js @@ -44,7 +44,7 @@ describe('peer-routing', () => { // Ring dial await Promise.all( - nodes.map((peer, i) => peer.dial(nodes[(i + 1) % number].peerInfo)) + nodes.map((peer, i) => peer.dial(nodes[(i + 1) % number].peerId)) ) }) @@ -59,7 +59,7 @@ describe('peer-routing', () => { sinon.stub(nodes[0]._dht, 'findPeer').callsFake(() => { deferred.resolve() - return nodes[1].peerInfo + return nodes[1].peerId }) nodes[0].peerRouting.findPeer() @@ -104,7 +104,7 @@ describe('peer-routing', () => { sinon.stub(delegate, 'findPeer').callsFake(() => { deferred.resolve() - return 'fake peer-info' + return 'fake peer-id' }) await node.peerRouting.findPeer() @@ -121,9 +121,9 @@ describe('peer-routing', () => { 'X-Chunked-Output', '1' ]) - const peerInfo = await node.peerRouting.findPeer(peerKey) + const peerData = await node.peerRouting.findPeer(peerKey) - expect(peerInfo.id.toB58String()).to.equal(peerKey) + expect(peerData.id).to.equal(peerKey) expect(mockApi.isDone()).to.equal(true) }) @@ -188,7 +188,7 @@ describe('peer-routing', () => { sinon.stub(node._dht, 'findPeer').callsFake(() => { dhtDeferred.resolve() - return node.peerInfo + return { id: node.peerId } }) sinon.stub(delegate, 'findPeer').callsFake(() => { throw new Error('the delegate should not have been called') diff --git a/test/peer-store/peer-store.spec.js b/test/peer-store/peer-store.spec.js index 19b74af7ff..c5568763a1 100644 --- a/test/peer-store/peer-store.spec.js +++ b/test/peer-store/peer-store.spec.js @@ -123,7 +123,7 @@ describe('peer-store', () => { const peerSupporting2 = [] for (const [, peerInfo] of peerStore.peers.entries()) { - if (peerInfo.protocols.has(proto2)) { + if (peerInfo.protocols.includes(proto2)) { peerSupporting2.push(peerInfo) } } @@ -137,7 +137,9 @@ describe('peer-store', () => { const peerListenint4 = [] for (const [, peerInfo] of peerStore.peers.entries()) { - if (peerInfo.multiaddrs.has(addr4)) { + const multiaddrs = peerInfo.multiaddrInfos.map((mi) => mi.multiaddr) + + if (multiaddrs.includes(addr4)) { peerListenint4.push(peerInfo) } } diff --git a/test/pubsub/configuration.node.js b/test/pubsub/configuration.node.js index b66c3f82d6..aeb49f28d1 100644 --- a/test/pubsub/configuration.node.js +++ b/test/pubsub/configuration.node.js @@ -32,11 +32,13 @@ describe('Pubsub subsystem is configurable', () => { }) it('should start and stop by default once libp2p starts', async () => { - const [peerInfo] = await peerUtils.createPeerInfo() - peerInfo.multiaddrs.add(listenAddr) + const [peerId] = await peerUtils.createPeerId() const customOptions = mergeOptions(subsystemOptions, { - peerInfo + peerId, + addresses: { + listen: [listenAddr] + } }) libp2p = await create(customOptions) @@ -50,11 +52,13 @@ describe('Pubsub subsystem is configurable', () => { }) it('should not start if disabled once libp2p starts', async () => { - const [peerInfo] = await peerUtils.createPeerInfo() - peerInfo.multiaddrs.add(listenAddr) + const [peerId] = await peerUtils.createPeerId() const customOptions = mergeOptions(subsystemOptions, { - peerInfo, + peerId, + addresses: { + listen: [listenAddr] + }, config: { pubsub: { enabled: false @@ -70,11 +74,13 @@ describe('Pubsub subsystem is configurable', () => { }) it('should allow a manual start', async () => { - const [peerInfo] = await peerUtils.createPeerInfo() - peerInfo.multiaddrs.add(listenAddr) + const [peerId] = await 
peerUtils.createPeerId() const customOptions = mergeOptions(subsystemOptions, { - peerInfo, + peerId, + addresses: { + listen: [listenAddr] + }, config: { pubsub: { enabled: false diff --git a/test/pubsub/implementations.node.js b/test/pubsub/implementations.node.js index 796212a880..e5ee043e58 100644 --- a/test/pubsub/implementations.node.js +++ b/test/pubsub/implementations.node.js @@ -24,14 +24,11 @@ const listenAddr = multiaddr('/ip4/127.0.0.1/tcp/0') const remoteListenAddr = multiaddr('/ip4/127.0.0.1/tcp/0') describe('Pubsub subsystem is able to use different implementations', () => { - let peerInfo, remotePeerInfo + let peerId, remotePeerId let libp2p, remoteLibp2p beforeEach(async () => { - [peerInfo, remotePeerInfo] = await peerUtils.createPeerInfo({ number: 2 }) - - peerInfo.multiaddrs.add(listenAddr) - remotePeerInfo.multiaddrs.add(remoteListenAddr) + [peerId, remotePeerId] = await peerUtils.createPeerId({ number: 2 }) }) afterEach(() => Promise.all([ @@ -53,14 +50,20 @@ describe('Pubsub subsystem is able to use different implementations', () => { const data = 'hey!' libp2p = await create(mergeOptions(baseOptions, { - peerInfo, + peerId, + addresses: { + listen: [listenAddr] + }, modules: { pubsub: pubsub } })) remoteLibp2p = await create(mergeOptions(baseOptions, { - peerInfo: remotePeerInfo, + peerId: remotePeerId, + addresses: { + listen: [remoteListenAddr] + }, modules: { pubsub: pubsub } @@ -71,9 +74,10 @@ describe('Pubsub subsystem is able to use different implementations', () => { remoteLibp2p.start() ]) - const libp2pId = libp2p.peerInfo.id.toB58String() + const libp2pId = libp2p.peerId.toB58String() + libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.addresses.listen) - const connection = await libp2p.dialProtocol(remotePeerInfo, multicodec) + const connection = await libp2p.dialProtocol(remotePeerId, multicodec) expect(connection).to.exist() libp2p.pubsub.subscribe(topic, (msg) => { diff --git a/test/pubsub/operation.node.js b/test/pubsub/operation.node.js index a364d81668..f92c191536 100644 --- a/test/pubsub/operation.node.js +++ b/test/pubsub/operation.node.js @@ -19,30 +19,35 @@ const listenAddr = multiaddr('/ip4/127.0.0.1/tcp/0') const remoteListenAddr = multiaddr('/ip4/127.0.0.1/tcp/0') describe('Pubsub subsystem operates correctly', () => { - let peerInfo, remotePeerInfo + let peerId, remotePeerId let libp2p, remoteLibp2p beforeEach(async () => { - [peerInfo, remotePeerInfo] = await peerUtils.createPeerInfo({ number: 2 }) - - peerInfo.multiaddrs.add(listenAddr) - remotePeerInfo.multiaddrs.add(remoteListenAddr) + [peerId, remotePeerId] = await peerUtils.createPeerId({ number: 2 }) }) describe('pubsub started before connect', () => { beforeEach(async () => { libp2p = await create(mergeOptions(subsystemOptions, { - peerInfo + peerId, + addresses: { + listen: [listenAddr] + } })) remoteLibp2p = await create(mergeOptions(subsystemOptions, { - peerInfo: remotePeerInfo + peerId: remotePeerId, + addresses: { + listen: [remoteListenAddr] + } })) await Promise.all([ libp2p.start(), remoteLibp2p.start() ]) + + libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.addresses.listen) }) afterEach(() => Promise.all([ @@ -55,7 +60,7 @@ describe('Pubsub subsystem operates correctly', () => { }) it('should get notified of connected peers on dial', async () => { - const connection = await libp2p.dialProtocol(remotePeerInfo, subsystemMulticodecs) + const connection = await libp2p.dialProtocol(remotePeerId, subsystemMulticodecs) expect(connection).to.exist() @@ -69,9 
+74,9 @@ describe('Pubsub subsystem operates correctly', () => { const defer = pDefer() const topic = 'test-topic' const data = 'hey!' - const libp2pId = libp2p.peerInfo.id.toB58String() + const libp2pId = libp2p.peerId.toB58String() - await libp2p.dialProtocol(remotePeerInfo, subsystemMulticodecs) + await libp2p.dialProtocol(remotePeerId, subsystemMulticodecs) let subscribedTopics = libp2p.pubsub.getTopics() expect(subscribedTopics).to.not.include(topic) @@ -98,11 +103,17 @@ describe('Pubsub subsystem operates correctly', () => { describe('pubsub started after connect', () => { beforeEach(async () => { libp2p = await create(mergeOptions(subsystemOptions, { - peerInfo + peerId, + addresses: { + listen: [listenAddr] + } })) remoteLibp2p = await create(mergeOptions(subsystemOptions, { - peerInfo: remotePeerInfo, + peerId: remotePeerId, + addresses: { + listen: [remoteListenAddr] + }, config: { pubsub: { enabled: false @@ -112,6 +123,8 @@ describe('Pubsub subsystem operates correctly', () => { await libp2p.start() await remoteLibp2p.start() + + libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.addresses.listen) }) afterEach(() => Promise.all([ @@ -124,7 +137,7 @@ describe('Pubsub subsystem operates correctly', () => { }) it('should get notified of connected peers after starting', async () => { - const connection = await libp2p.dial(remotePeerInfo) + const connection = await libp2p.dial(remotePeerId) expect(connection).to.exist() expect(libp2p.pubsub._pubsub.peers.size).to.be.eql(0) @@ -141,11 +154,11 @@ describe('Pubsub subsystem operates correctly', () => { it('should receive pubsub messages', async function () { this.timeout(10e3) const defer = pDefer() - const libp2pId = libp2p.peerInfo.id.toB58String() + const libp2pId = libp2p.peerId.toB58String() const topic = 'test-topic' const data = 'hey!' 
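The pubsub tests above repeat one pattern now that `PeerInfo` is gone: each node is created with a `peerId` plus an `addresses.listen` array, and the dialing side seeds its address book with the remote's listen addresses before dialing the bare `PeerId`. A minimal sketch of that pattern, assuming the post-migration API shown throughout this patch series (the `connectNodes` helper itself is hypothetical, not part of the patch):

```js
// Hypothetical helper illustrating the dial-by-PeerId flow used in these tests.
const Libp2p = require('../../src')

async function connectNodes (localOptions, remoteOptions) {
  // Both configs are expected to carry `peerId` and `addresses: { listen: [...] }`.
  const libp2p = await Libp2p.create(localOptions)
  const remoteLibp2p = await Libp2p.create(remoteOptions)

  await Promise.all([libp2p.start(), remoteLibp2p.start()])

  // A bare PeerId carries no addresses, so the address book must be primed
  // first (here from the remote's configured listen addresses).
  libp2p.peerStore.addressBook.set(remoteLibp2p.peerId, remoteLibp2p.addresses.listen)

  // Dialing a PeerId now resolves its addresses from the peer store.
  return libp2p.dial(remoteLibp2p.peerId)
}
```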
- await libp2p.dial(remotePeerInfo) + await libp2p.dial(remotePeerId) remoteLibp2p.pubsub.start() diff --git a/test/registrar/registrar.node.js b/test/registrar/registrar.node.js index df1c61657d..f2829d918a 100644 --- a/test/registrar/registrar.node.js +++ b/test/registrar/registrar.node.js @@ -16,20 +16,20 @@ const peerUtils = require('../utils/creators/peer') const listenAddr = multiaddr('/ip4/127.0.0.1/tcp/0') describe('registrar on dial', () => { - let peerInfo - let remotePeerInfo + let peerId + let remotePeerId let libp2p let remoteLibp2p let remoteAddr before(async () => { - [peerInfo, remotePeerInfo] = await peerUtils.createPeerInfo({ number: 2 }) + [peerId, remotePeerId] = await peerUtils.createPeerId({ number: 2 }) remoteLibp2p = new Libp2p(mergeOptions(baseOptions, { - peerInfo: remotePeerInfo + peerId: remotePeerId })) await remoteLibp2p.transportManager.listen([listenAddr]) - remoteAddr = remoteLibp2p.transportManager.getAddrs()[0].encapsulate(`/p2p/${remotePeerInfo.id.toB58String()}`) + remoteAddr = remoteLibp2p.transportManager.getAddrs()[0].encapsulate(`/p2p/${remotePeerId.toB58String()}`) }) after(async () => { @@ -40,7 +40,7 @@ describe('registrar on dial', () => { it('should inform registrar of a new connection', async () => { libp2p = new Libp2p(mergeOptions(baseOptions, { - peerInfo + peerId })) sinon.spy(remoteLibp2p.registrar, 'onConnect') @@ -48,16 +48,16 @@ describe('registrar on dial', () => { await libp2p.dial(remoteAddr) expect(remoteLibp2p.registrar.onConnect.callCount).to.equal(1) - const libp2pConn = libp2p.registrar.getConnection(remotePeerInfo) + const libp2pConn = libp2p.registrar.getConnection(remotePeerId) expect(libp2pConn).to.exist() - const remoteConn = remoteLibp2p.registrar.getConnection(peerInfo) + const remoteConn = remoteLibp2p.registrar.getConnection(peerId) expect(remoteConn).to.exist() }) it('should be closed on libp2p stop', async () => { libp2p = new Libp2p(mergeOptions(baseOptions, { - peerInfo + peerId })) await libp2p.dial(remoteAddr) diff --git a/test/registrar/registrar.spec.js b/test/registrar/registrar.spec.js index 3e2744901e..696531e98b 100644 --- a/test/registrar/registrar.spec.js +++ b/test/registrar/registrar.spec.js @@ -6,7 +6,6 @@ chai.use(require('dirty-chai')) const { expect } = chai const pDefer = require('p-defer') -const PeerInfo = require('peer-info') const Topology = require('libp2p-interfaces/src/topology/multicodec-topology') const PeerStore = require('../../src/peer-store') const Registrar = require('../../src/registrar') @@ -83,29 +82,25 @@ describe('registrar', () => { // Setup connections before registrar const conn = await createMockConnection() - const remotePeerInfo = await PeerInfo.create(conn.remotePeer) + const remotePeerId = conn.remotePeer - // Add protocol to peer - remotePeerInfo.protocols.add(multicodec) + // Add connected peer with protocol to peerStore and registrar + peerStore.protoBook.add(remotePeerId, [multicodec]) - // Add connected peer to peerStore and registrar - peerStore.addressBook.set(remotePeerInfo.id, remotePeerInfo.multiaddrs.toArray()) - peerStore.protoBook.set(remotePeerInfo.id, Array.from(remotePeerInfo.protocols)) - - registrar.onConnect(remotePeerInfo, conn) + registrar.onConnect(remotePeerId, conn) expect(registrar.connections.size).to.eql(1) const topologyProps = new Topology({ multicodecs: multicodec, handlers: { - onConnect: (peerInfo, connection) => { - expect(peerInfo.id.toB58String()).to.eql(remotePeerInfo.id.toB58String()) + onConnect: (peerId, connection) => { + 
expect(peerId.toB58String()).to.eql(remotePeerId.toB58String()) expect(connection.id).to.eql(conn.id) onConnectDefer.resolve() }, - onDisconnect: (peerInfo) => { - expect(peerInfo.id.toB58String()).to.eql(remotePeerInfo.id.toB58String()) + onDisconnect: (peerId) => { + expect(peerId.toB58String()).to.eql(remotePeerId.toB58String()) onDisconnectDefer.resolve() } @@ -119,7 +114,7 @@ describe('registrar', () => { // Topology created expect(topology).to.exist() - registrar.onDisconnect(remotePeerInfo) + registrar.onDisconnect(remotePeerId) expect(registrar.connections.size).to.eql(0) // Wait for handlers to be called @@ -155,26 +150,19 @@ describe('registrar', () => { // Setup connections before registrar const conn = await createMockConnection() - const peerInfo = await PeerInfo.create(conn.remotePeer) + const remotePeerId = conn.remotePeer // Add connected peer to peerStore and registrar - peerStore.addressBook.set(peerInfo.id, peerInfo.multiaddrs.toArray()) - peerStore.protoBook.set(peerInfo.id, Array.from(peerInfo.protocols)) - - registrar.onConnect(peerInfo, conn) + peerStore.protoBook.set(remotePeerId, []) + registrar.onConnect(remotePeerId, conn) // Add protocol to peer and update it - peerInfo.protocols.add(multicodec) - peerStore.addressBook.add(peerInfo.id, peerInfo.multiaddrs.toArray()) - peerStore.protoBook.add(peerInfo.id, Array.from(peerInfo.protocols)) + peerStore.protoBook.add(remotePeerId, [multicodec]) await onConnectDefer.promise // Remove protocol to peer and update it - peerInfo.protocols.delete(multicodec) - - peerStore.addressBook.set(peerInfo.id, peerInfo.multiaddrs.toArray()) - peerStore.protoBook.set(peerInfo.id, Array.from(peerInfo.protocols)) + peerStore.protoBook.set(remotePeerId, []) await onDisconnectDefer.promise }) @@ -196,23 +184,21 @@ describe('registrar', () => { registrar.register(topologyProps) // Setup connections before registrar - const [localPeer, remotePeer] = await peerUtils.createPeerInfo({ number: 2 }) + const [localPeer, remotePeer] = await peerUtils.createPeerId({ number: 2 }) + + const conn1 = await createMockConnection({ localPeer, remotePeer }) + const conn2 = await createMockConnection({ localPeer, remotePeer }) - const conn1 = await createMockConnection({ localPeer: localPeer.id, remotePeer: remotePeer.id }) - const conn2 = await createMockConnection({ localPeer: localPeer.id, remotePeer: remotePeer.id }) - const peerInfo = await PeerInfo.create(remotePeer.id) - const id = peerInfo.id.toB58String() + const id = remotePeer.toB58String() // Add connection to registrar - peerStore.addressBook.set(peerInfo.id, peerInfo.multiaddrs.toArray()) - peerStore.protoBook.set(peerInfo.id, Array.from(peerInfo.protocols)) - registrar.onConnect(peerInfo, conn1) - registrar.onConnect(peerInfo, conn2) + registrar.onConnect(remotePeer, conn1) + registrar.onConnect(remotePeer, conn2) expect(registrar.connections.get(id).length).to.eql(2) conn2._stat.status = 'closed' - registrar.onDisconnect(peerInfo, conn2) + registrar.onDisconnect(remotePeer, conn2) const peerConnections = registrar.connections.get(id) expect(peerConnections.length).to.eql(1) diff --git a/test/registrar/utils.js b/test/registrar/utils.js index 5676827c22..727d99b195 100644 --- a/test/registrar/utils.js +++ b/test/registrar/utils.js @@ -11,13 +11,13 @@ module.exports.createMockConnection = async (properties = {}) => { const localAddr = multiaddr('/ip4/127.0.0.1/tcp/8080') const remoteAddr = multiaddr('/ip4/127.0.0.1/tcp/8081') - const [localPeer, remotePeer] = await peerUtils.createPeerInfo({ 
number: 2 }) + const [localPeer, remotePeer] = await peerUtils.createPeerId({ number: 2 }) const openStreams = [] let streamId = 0 return new Connection({ - localPeer: localPeer.id, - remotePeer: remotePeer.id, + localPeer: localPeer, + remotePeer: remotePeer, localAddr, remoteAddr, stat: { diff --git a/test/transports/transport-manager.spec.js b/test/transports/transport-manager.spec.js index 4e71001e21..d165923f49 100644 --- a/test/transports/transport-manager.spec.js +++ b/test/transports/transport-manager.spec.js @@ -16,7 +16,6 @@ const { codes: ErrorCodes } = require('../../src/errors') const Libp2p = require('../../src') const Peers = require('../fixtures/peers') const PeerId = require('peer-id') -const PeerInfo = require('peer-info') describe('Transport Manager (WebSockets)', () => { let tm @@ -88,12 +87,11 @@ describe('Transport Manager (WebSockets)', () => { }) describe('libp2p.transportManager', () => { - let peerInfo + let peerId let libp2p before(async () => { - const peerId = await PeerId.createFromJSON(Peers[0]) - peerInfo = new PeerInfo(peerId) + peerId = await PeerId.createFromJSON(Peers[0]) }) afterEach(async () => { @@ -104,7 +102,7 @@ describe('libp2p.transportManager', () => { it('should create a TransportManager', () => { libp2p = new Libp2p({ - peerInfo, + peerId, modules: { transport: [Transport] } @@ -122,7 +120,7 @@ describe('libp2p.transportManager', () => { another: 'value' } libp2p = new Libp2p({ - peerInfo, + peerId, modules: { transport: [spy] }, @@ -146,7 +144,7 @@ describe('libp2p.transportManager', () => { it('starting and stopping libp2p should start and stop TransportManager', async () => { libp2p = new Libp2p({ - peerInfo, + peerId, modules: { transport: [Transport] } diff --git a/test/upgrading/upgrader.spec.js b/test/upgrading/upgrader.spec.js index ef0cb84c9a..6579bfe347 100644 --- a/test/upgrading/upgrader.spec.js +++ b/test/upgrading/upgrader.spec.js @@ -8,7 +8,6 @@ const sinon = require('sinon') const Muxer = require('libp2p-mplex') const multiaddr = require('multiaddr') const PeerId = require('peer-id') -const PeerInfo = require('peer-info') const pipe = require('it-pipe') const { collect } = require('streaming-iterables') const pSettle = require('p-settle') @@ -347,11 +346,10 @@ describe('libp2p.upgrader', () => { let libp2p before(async () => { - const ids = await Promise.all([ + peers = await Promise.all([ PeerId.createFromJSON(Peers[0]), PeerId.createFromJSON(Peers[1]) ]) - peers = ids.map(peerId => new PeerInfo(peerId)) }) afterEach(async () => { @@ -363,7 +361,7 @@ describe('libp2p.upgrader', () => { it('should create an Upgrader', () => { const protector = new Protector(swarmKeyBuffer) libp2p = new Libp2p({ - peerInfo: peers[0], + peerId: peers[0], modules: { transport: [Transport], streamMuxer: [Muxer], @@ -382,7 +380,7 @@ describe('libp2p.upgrader', () => { it('should be able to register and unregister a handler', () => { libp2p = new Libp2p({ - peerInfo: peers[0], + peerId: peers[0], modules: { transport: [Transport], streamMuxer: [Muxer], @@ -405,7 +403,7 @@ describe('libp2p.upgrader', () => { it('should emit connect and disconnect events', async () => { const remotePeer = peers[1] libp2p = new Libp2p({ - peerInfo: peers[0], + peerId: peers[0], modules: { transport: [Transport], streamMuxer: [Muxer], @@ -414,12 +412,12 @@ describe('libp2p.upgrader', () => { }) const remoteUpgrader = new Upgrader({ - localPeer: remotePeer.id, + localPeer: remotePeer, muxers: new Map([[Muxer.multicodec, Muxer]]), cryptos: new Map([[Crypto.protocol, 
Crypto]]) }) - const { inbound, outbound } = mockMultiaddrConnPair({ addrs, remotePeer: remotePeer.id }) + const { inbound, outbound } = mockMultiaddrConnPair({ addrs, remotePeer }) // Spy on emit for easy verification sinon.spy(libp2p, 'emit') @@ -430,15 +428,16 @@ describe('libp2p.upgrader', () => { remoteUpgrader.upgradeInbound(inbound) ]) expect(libp2p.emit.callCount).to.equal(1) - let [event, peerInfo] = libp2p.emit.getCall(0).args + + let [event, peerId] = libp2p.emit.getCall(0).args expect(event).to.equal('peer:connect') - expect(peerInfo.id.isEqual(remotePeer.id)).to.equal(true) + expect(peerId.isEqual(remotePeer)).to.equal(true) // Close and check the disconnect event await Promise.all(connections.map(conn => conn.close())) expect(libp2p.emit.callCount).to.equal(2) - ;([event, peerInfo] = libp2p.emit.getCall(1).args) + ;([event, peerId] = libp2p.emit.getCall(1).args) expect(event).to.equal('peer:disconnect') - expect(peerInfo.id.isEqual(remotePeer.id)).to.equal(true) + expect(peerId.isEqual(remotePeer)).to.equal(true) }) }) diff --git a/test/utils/creators/peer.js b/test/utils/creators/peer.js index 31c2653398..d85c4e8d38 100644 --- a/test/utils/creators/peer.js +++ b/test/utils/creators/peer.js @@ -4,7 +4,6 @@ const pTimes = require('p-times') const multiaddr = require('multiaddr') const PeerId = require('peer-id') -const PeerInfo = require('peer-info') const Libp2p = require('../../../src') const Peers = require('../../fixtures/peers') @@ -19,37 +18,36 @@ const listenAddr = multiaddr('/ip4/127.0.0.1/tcp/0') * @param {number} [properties.number] number of peers (default: 1). * @param {boolean} [properties.fixture] use fixture for peer-id generation (default: true) * @param {boolean} [properties.started] nodes should start (default: true) + * @param {boolean} [properties.populateAddressBooks] nodes addressBooks should be populated with other peers (default: true) * @return {Promise>} */ -async function createPeer ({ number = 1, fixture = true, started = true, config = defaultOptions } = {}) { - const peerInfos = await createPeerInfo({ number, fixture }) +async function createPeer ({ number = 1, fixture = true, started = true, populateAddressBooks = true, config = defaultOptions } = {}) { + const peerIds = await createPeerId({ number, fixture }) + const addresses = started ? { listen: [listenAddr] } : {} const peers = await pTimes(number, (i) => Libp2p.create({ - peerInfo: peerInfos[i], + peerId: peerIds[i], + addresses, ...config })) if (started) { - await Promise.all(peers.map((p) => { - p.peerInfo.multiaddrs.add(listenAddr) - return p.start() - })) + await Promise.all(peers.map((p) => p.start())) + + populateAddressBooks && _populateAddressBooks(peers) } return peers } -/** - * Create Peer-ids. - * @param {Object} [properties] - * @param {number} [properties.number] number of peers (default: 1). 
- * @param {boolean} [properties.fixture] use fixture for peer-id generation (default: true) - * @return {Promise>} - */ -async function createPeerInfo ({ number = 1, fixture = true } = {}) { - const peerIds = await createPeerId({ number, fixture }) - - return pTimes(number, (i) => PeerInfo.create(peerIds[i])) +function _populateAddressBooks (peers) { + for (let i = 0; i < peers.length; i++) { + for (let j = 0; j < peers.length; j++) { + if (i !== j) { + peers[i].peerStore.addressBook.set(peers[j].peerId, peers[j].addresses.listen) + } + } + } } /** @@ -67,5 +65,4 @@ function createPeerId ({ number = 1, fixture = true } = {}) { } module.exports.createPeer = createPeer -module.exports.createPeerInfo = createPeerInfo module.exports.createPeerId = createPeerId diff --git a/test/utils/mockConnection.js b/test/utils/mockConnection.js index 022f5efce0..4d47dc709e 100644 --- a/test/utils/mockConnection.js +++ b/test/utils/mockConnection.js @@ -16,13 +16,13 @@ module.exports = async (properties = {}) => { const localAddr = multiaddr('/ip4/127.0.0.1/tcp/8080') const remoteAddr = multiaddr('/ip4/127.0.0.1/tcp/8081') - const [localPeer, remotePeer] = await peerUtils.createPeerInfo({ number: 2 }) + const [localPeer, remotePeer] = await peerUtils.createPeerId({ number: 2 }) const openStreams = [] let streamId = 0 return new Connection({ - localPeer: localPeer.id, - remotePeer: remotePeer.id, + localPeer: localPeer, + remotePeer: remotePeer, localAddr, remoteAddr, stat: { From 984d93360639587f1f26a36f3cf7a7d1f1ebb946 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Fri, 24 Apr 2020 13:01:04 +0200 Subject: [PATCH 091/102] chore: apply suggestions from code review Co-Authored-By: Jacob Heun --- doc/API.md | 2 +- src/dialer/index.js | 13 +++++++++---- src/{get-peer-id.js => get-peer.js} | 15 +++++++-------- src/identify/index.js | 2 +- src/index.js | 16 +++++++++------- 5 files changed, 27 insertions(+), 21 deletions(-) rename src/{get-peer-id.js => get-peer.js} (74%) diff --git a/doc/API.md b/doc/API.md index 2f70a1b432..7cf7418516 100644 --- a/doc/API.md +++ b/doc/API.md @@ -411,7 +411,7 @@ Once a content router succeeds, the iteration will stop. If the DHT is enabled, ```js // Iterate over the providers found for the given cid for await (const provider of libp2p.contentRouting.findProviders(cid)) { - console.log(provider.id, provider.addrs) + console.log(provider.id, provider.multiaddrs) } ``` diff --git a/src/dialer/index.js b/src/dialer/index.js index 92b04153c3..5e460ab3d3 100644 --- a/src/dialer/index.js +++ b/src/dialer/index.js @@ -9,7 +9,7 @@ const log = debug('libp2p:dialer') log.error = debug('libp2p:dialer:error') const { DialRequest } = require('./dial-request') -const getPeerId = require('../get-peer-id') +const getPeer = require('../get-peer') const { codes } = require('../errors') const { @@ -106,8 +106,13 @@ class Dialer { * @returns {DialTarget} */ _createDialTarget (peer) { - const peerId = getPeerId(peer, this.peerStore) - let addrs = this.peerStore.addressBook.getMultiaddrsForPeer(peerId) + const { id, multiaddrs } = getPeer(peer) + + if (multiaddrs) { + this.peerStore.addressBook.add(id, multiaddrs) + } + + let addrs = this.peerStore.addressBook.getMultiaddrsForPeer(id) // If received a multiaddr to dial, it should be the first to use // But, if we know other multiaddrs for the peer, we should try them too. 
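Together with the `getPeer` helper changed just below, `_createDialTarget` now funnels every dial input — a `PeerId`, a `Multiaddr`, or an address string — through the address book before building the dial target. A rough sketch of that flow, not the actual implementation (the `resolveDialTarget` wrapper and its locals are illustrative only; note the patch's tests expect dialing a multiaddr without a peer id to fail with `ERR_INVALID_MULTIADDR`):

```js
// Illustrative only: condenses the getPeer + _createDialTarget logic from this patch.
const multiaddr = require('multiaddr')
const PeerId = require('peer-id')

function resolveDialTarget (peer, peerStore) {
  // Normalise the input: strings become multiaddrs, multiaddrs yield a PeerId.
  if (typeof peer === 'string') peer = multiaddr(peer)

  let addr
  if (multiaddr.isMultiaddr(peer)) {
    addr = peer
    peer = PeerId.createFromB58String(peer.getPeerId())
  }

  // Any multiaddr we were handed is recorded first...
  if (addr) peerStore.addressBook.add(peer, [addr])

  // ...so the address book is the single source of the addresses to try.
  const addrs = peerStore.addressBook.getMultiaddrsForPeer(peer)

  return { id: peer.toB58String(), addrs }
}
```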
@@ -117,7 +122,7 @@ class Dialer { } return { - id: peerId.toB58String(), + id: id.toB58String(), addrs } } diff --git a/src/get-peer-id.js b/src/get-peer.js similarity index 74% rename from src/get-peer-id.js rename to src/get-peer.js index 9eb687e8bb..cb6cba4c19 100644 --- a/src/get-peer-id.js +++ b/src/get-peer.js @@ -7,13 +7,13 @@ const errCode = require('err-code') const { codes } = require('./errors') /** - * Converts the given `peer` to a `PeerId` instance. + * Converts the given `peer` to a `Peer` object. * If a multiaddr is received, the addressBook is updated. * @param {PeerId|Multiaddr|string} peer * @param {PeerStore} peerStore - * @returns {PeerId} + * @returns {{ id: PeerId, multiaddrs: Array }} */ -function getPeerId (peer, peerStore) { +function getPeer (peer) { if (typeof peer === 'string') { peer = multiaddr(peer) } @@ -31,11 +31,10 @@ function getPeerId (peer, peerStore) { } } - if (addr && peerStore) { - peerStore.addressBook.add(peer, [addr]) + return { + id: peer, + multiaddrs: addr ? [addr] : undefined } - - return peer } -module.exports = getPeerId +module.exports = getPeer diff --git a/src/identify/index.js b/src/identify/index.js index f64e19f888..6fe8732fd6 100644 --- a/src/identify/index.js +++ b/src/identify/index.js @@ -48,7 +48,7 @@ class IdentifyService { * @param {Registrar} options.registrar * @param {Map} options.protocols A reference to the protocols we support * @param {PeerId} options.peerId The peer running the identify service - * @param {{ listen: Array}} options.addresses The peer aaddresses + * @param {{ listen: Array}} options.addresses The peer addresses */ constructor (options) { /** diff --git a/src/index.js b/src/index.js index b90382ed08..c25c69e46e 100644 --- a/src/index.js +++ b/src/index.js @@ -10,7 +10,7 @@ const PeerId = require('peer-id') const peerRouting = require('./peer-routing') const contentRouting = require('./content-routing') const pubsub = require('./pubsub') -const getPeerId = require('./get-peer-id') +const getPeer = require('./get-peer') const { validate: validateConfig } = require('./config') const { codes } = require('./errors') @@ -290,11 +290,13 @@ class Libp2p extends EventEmitter { * @returns {Promise} */ async dialProtocol (peer, protocols, options) { - const peerId = getPeerId(peer, this.peerStore) - let connection = this.registrar.getConnection(peerId) + const { id, multiaddrs } = getPeer(peer, this.peerStore) + let connection = this.registrar.getConnection(id) if (!connection) { connection = await this.dialer.connectToPeer(peer, options) + } else { + this.peerStore.addressBook.add(id, multiaddrs) } // If a protocol was provided, create a new stream @@ -311,10 +313,10 @@ class Libp2p extends EventEmitter { * @returns {Promise} */ hangUp (peer) { - const peerId = getPeerId(peer) + const { id } = getPeer(peer) return Promise.all( - this.registrar.connections.get(peerId.toB58String()).map(connection => { + this.registrar.connections.get(id.toB58String()).map(connection => { return connection.close() }) ) @@ -326,9 +328,9 @@ class Libp2p extends EventEmitter { * @returns {Promise} */ ping (peer) { - const peerId = getPeerId(peer) + const { id } = getPeer(peer) - return ping(this, peerId) + return ping(this, id) } /** From 872338840fb06a489abe0926b927fbf1a3b3c4ad Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Fri, 24 Apr 2020 15:54:59 +0200 Subject: [PATCH 092/102] chore: rename peer-store properties --- doc/API.md | 20 +++---- package.json | 2 +- src/content-routing.js | 4 +- src/index.js | 22 ++++---- 
src/peer-store/README.md | 4 +- src/peer-store/address-book.js | 76 +++++++++++++------------- src/peer-store/index.js | 24 ++++---- test/peer-routing/peer-routing.node.js | 4 +- test/peer-store/address-book.spec.js | 32 +++++------ test/peer-store/peer-store.spec.js | 32 +++++------ 10 files changed, 110 insertions(+), 110 deletions(-) diff --git a/doc/API.md b/doc/API.md index 7cf7418516..75c0af683f 100644 --- a/doc/API.md +++ b/doc/API.md @@ -381,7 +381,7 @@ Iterates over all peer routers in series to find the given peer. If the DHT is e ```js // ... -const peerData = await libp2p.peerRouting.findPeer(peerId, options) +const peer = await libp2p.peerRouting.findPeer(peerId, options) ``` ### contentRouting.findProviders @@ -585,7 +585,7 @@ peerStore.addressBook.delete(peerId) ### peerStore.addressBook.get -Get the known [`MultiaddrInfos`][multiaddr-info] of a provided peer. +Get the known [`Addresses`][address] of a provided peer. `peerStore.addressBook.get(peerId)` @@ -599,7 +599,7 @@ Get the known [`MultiaddrInfos`][multiaddr-info] of a provided peer. | Type | Description | |------|-------------| -| `Array` | Array of peer's multiaddr with their relevant information [`MultiaddrInfo`][multiaddr-info] | +| `Array
` | Array of peer's [`Addresses`][address] containing the multiaddr and its metadata | #### Example @@ -818,7 +818,7 @@ peerStore.delete(peerId2) ### peerStore.get -Get the stored information of a given peer, namely its [`PeerId`][peer-id], known [`MultiaddrInfos`][multiaddr-info] and supported protocols. +Get the stored information of a given peer, namely its [`PeerId`][peer-id], known [`Addresses`][address] and supported protocols. `peerStore.get(peerId)` @@ -832,7 +832,7 @@ Get the stored information of a given peer, namely its [`PeerId`][peer-id], know | Type | Description | |------|-------------| -| `{ id: PeerId, multiaddrInfos: Array, protocols: Array }` | Peer information of the provided peer | +| `{ id: PeerId, addresses: Array
, protocols: Array }` | Peer information of the provided peer | #### Example @@ -844,7 +844,7 @@ peerStore.protoBook.set(peerId, protocols) peerStore.get(peerId) // { // id: {}, -// MultiaddrInfos: [...], +// addresses: [...], // protocols: [...] // } ``` @@ -859,13 +859,13 @@ Get all the stored information of every peer. | Type | Description | |------|-------------| -| `Map, protocols: Array }>` | Peer data of every peer known | +| `Map, protocols: Array }>` | Peer data of every peer known | #### Example ```js -for (let [peerIdString, peerData] of peerStore.peers.entries()) { - // peerData +for (let [peerIdString, peer] of peerStore.peers.entries()) { + // peer { id, addresses, protocols } } ``` @@ -1184,7 +1184,7 @@ This event will be triggered anytime we are disconnected from another peer, rega - `['300000']`: The [MovingAverage](https://www.npmjs.com/package/moving-averages) at a 5 minute interval. - `['900000']`: The [MovingAverage](https://www.npmjs.com/package/moving-averages) at a 15 minute interval. -[multiaddr-info]: https://github.com/libp2p/js-libp2p/tree/master/src/peer-store/address-book.js +[address]: https://github.com/libp2p/js-libp2p/tree/master/src/peer-store/address-book.js [cid]: https://github.com/multiformats/js-cid [connection]: https://github.com/libp2p/js-interfaces/tree/master/src/connection [multiaddr]: https://github.com/multiformats/js-multiaddr diff --git a/package.json b/package.json index 0218decf39..ff99244bf0 100644 --- a/package.json +++ b/package.json @@ -93,7 +93,7 @@ "libp2p-delegated-peer-routing": "^0.5.0", "libp2p-floodsub": "^0.21.0", "libp2p-gossipsub": "^0.4.0", - "libp2p-kad-dht": "^0.19.0", + "libp2p-kad-dht": "libp2p/js-libp2p-kad-dht#chore/rename-peer-store-properties", "libp2p-mdns": "^0.14.0", "libp2p-mplex": "^0.9.1", "libp2p-secio": "^0.12.1", diff --git a/src/content-routing.js b/src/content-routing.js index bf63e773e6..71b2290ec8 100644 --- a/src/content-routing.js +++ b/src/content-routing.js @@ -42,8 +42,8 @@ module.exports = (node) => { }) ) - for (const peerData of result) { - yield peerData + for (const peer of result) { + yield peer } }, diff --git a/src/index.js b/src/index.js index c25c69e46e..c0b7fded57 100644 --- a/src/index.js +++ b/src/index.js @@ -267,7 +267,7 @@ class Libp2p extends EventEmitter { } /** - * Dials to the provided peer. If successful, the known `PeerData` of the + * Dials to the provided peer. If successful, the known `Peer` data of the * peer will be added to the nodes `peerStore` * @param {PeerId|Multiaddr|string} peer The peer to dial * @param {object} options @@ -280,7 +280,7 @@ class Libp2p extends EventEmitter { /** * Dials to the provided peer and handshakes with the given protocol. 
- * If successful, the known `PeerData` of the peer will be added to the nodes `peerStore`, + * If successful, the known `Peer` data of the peer will be added to the nodes `peerStore`, * and the `Connection` will be returned * @async * @param {PeerId|Multiaddr|string} peer The peer to dial @@ -295,7 +295,7 @@ class Libp2p extends EventEmitter { if (!connection) { connection = await this.dialer.connectToPeer(peer, options) - } else { + } else if (multiaddrs) { this.peerStore.addressBook.add(id, multiaddrs) } @@ -412,9 +412,9 @@ class Libp2p extends EventEmitter { await this._setupPeerDiscovery() // Once we start, emit and dial any peers we may have already discovered - for (const peerData of this.peerStore.peers.values()) { - this.emit('peer:discovery', peerData.id) - this._maybeConnect(peerData.id) + for (const peer of this.peerStore.peers.values()) { + this.emit('peer:discovery', peer.id) + this._maybeConnect(peer.id) } } @@ -422,16 +422,16 @@ class Libp2p extends EventEmitter { * Called whenever peer discovery services emit `peer` events. * Known peers may be emitted. * @private - * @param {PeerDara} peerData + * @param {PeerDara} peer */ - _onDiscoveryPeer (peerData) { - if (peerData.id.toB58String() === this.peerId.toB58String()) { + _onDiscoveryPeer (peer) { + if (peer.id.toB58String() === this.peerId.toB58String()) { log.error(new Error(codes.ERR_DISCOVERED_SELF)) return } - peerData.multiaddrs && this.peerStore.addressBook.add(peerData.id, peerData.multiaddrs) - peerData.protocols && this.peerStore.protoBook.set(peerData.id, peerData.protocols) + peer.multiaddrs && this.peerStore.addressBook.add(peer.id, peer.multiaddrs) + peer.protocols && this.peerStore.protoBook.set(peer.id, peer.protocols) } /** diff --git a/src/peer-store/README.md b/src/peer-store/README.md index bb309a0ddf..a1e7193541 100644 --- a/src/peer-store/README.md +++ b/src/peer-store/README.md @@ -40,9 +40,9 @@ The PeerStore wraps four main components: `addressBook`, `keyBook`, `protocolBoo The `addressBook` keeps the known multiaddrs of a peer. The multiaddrs of each peer may change over time and the Address Book must account for this. -`Map` +`Map` -A `peerId.toString()` identifier mapping to a `multiaddrInfo` object, which should have the following structure: +A `peerId.toString()` identifier mapping to a `Address` object, which should have the following structure: ```js { diff --git a/src/peer-store/address-book.js b/src/peer-store/address-book.js index 271c4cfea2..8488fb20b8 100644 --- a/src/peer-store/address-book.js +++ b/src/peer-store/address-book.js @@ -20,8 +20,8 @@ const { */ class AddressBook extends Book { /** - * MultiaddrInfo object - * @typedef {Object} MultiaddrInfo + * Address object + * @typedef {Object} Address * @property {Multiaddr} multiaddr peer multiaddr. */ @@ -38,37 +38,37 @@ class AddressBook extends Book { super(peerStore, 'change:multiaddrs', 'multiaddrs') /** - * Map known peers to their known multiaddrs. - * @type {Map>} + * Map known peers to their known Addresses. + * @type {Map>} */ this.data = new Map() } /** - * Set known addresses of a provided peer. + * Set known multiaddrs of a provided peer. 
* @override * @param {PeerId} peerId - * @param {Array} addresses + * @param {Array} multiaddrs * @returns {AddressBook} */ - set (peerId, addresses) { + set (peerId, multiaddrs) { if (!PeerId.isPeerId(peerId)) { log.error('peerId must be an instance of peer-id to store data') throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS) } - const multiaddrInfos = this._toMultiaddrInfos(addresses) + const addresses = this._toAddresses(multiaddrs) const id = peerId.toB58String() const rec = this.data.get(id) // Not replace multiaddrs - if (!multiaddrInfos.length) { + if (!addresses.length) { return this } // Already knows the peer - if (rec && rec.length === multiaddrInfos.length) { - const intersection = rec.filter((mi) => multiaddrInfos.some((newMi) => mi.multiaddr.equals(newMi.multiaddr))) + if (rec && rec.length === addresses.length) { + const intersection = rec.filter((mi) => addresses.some((newMi) => mi.multiaddr.equals(newMi.multiaddr))) // Are new addresses equal to the old ones? // If yes, no changes needed! @@ -78,7 +78,7 @@ class AddressBook extends Book { } } - this.data.set(id, multiaddrInfos) + this.data.set(id, addresses) this._setPeerId(peerId) log(`stored provided multiaddrs for ${id}`) @@ -89,7 +89,7 @@ class AddressBook extends Book { this._ps.emit('change:multiaddrs', { peerId, - multiaddrs: multiaddrInfos.map((mi) => mi.multiaddr) + multiaddrs: addresses.map((mi) => mi.multiaddr) }) return this @@ -100,41 +100,41 @@ class AddressBook extends Book { * If the peer is not known, it is set with the given addresses. * @override * @param {PeerId} peerId - * @param {Array} addresses + * @param {Array} multiaddrs * @returns {AddressBook} */ - add (peerId, addresses) { + add (peerId, multiaddrs) { if (!PeerId.isPeerId(peerId)) { log.error('peerId must be an instance of peer-id to store data') throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS) } - const multiaddrInfos = this._toMultiaddrInfos(addresses) + const addresses = this._toAddresses(multiaddrs) const id = peerId.toB58String() const rec = this.data.get(id) // Add recorded uniquely to the new array (Union) rec && rec.forEach((mi) => { - if (!multiaddrInfos.find(r => r.multiaddr.equals(mi.multiaddr))) { - multiaddrInfos.push(mi) + if (!addresses.find(r => r.multiaddr.equals(mi.multiaddr))) { + addresses.push(mi) } }) // If the recorded length is equal to the new after the unique union // The content is the same, no need to update. - if (rec && rec.length === multiaddrInfos.length) { + if (rec && rec.length === addresses.length) { log(`the addresses provided to store are already stored for ${id}`) return this } this._setPeerId(peerId) - this.data.set(id, multiaddrInfos) + this.data.set(id, addresses) log(`added provided multiaddrs for ${id}`) this._ps.emit('change:multiaddrs', { peerId, - multiaddrs: multiaddrInfos.map((mi) => mi.multiaddr) + multiaddrs: addresses.map((mi) => mi.multiaddr) }) // Notify the existance of a new peer @@ -146,30 +146,30 @@ class AddressBook extends Book { } /** - * Transforms received multiaddrs into MultiaddrInfo. - * @param {Array} addresses - * @returns {Array} + * Transforms received multiaddrs into Address. + * @param {Array} multiaddrs + * @returns {Array
} */ - _toMultiaddrInfos (addresses) { - if (!addresses) { - log.error('addresses must be provided to store data') - throw errcode(new Error('addresses must be provided'), ERR_INVALID_PARAMETERS) + _toAddresses (multiaddrs) { + if (!multiaddrs) { + log.error('multiaddrs must be provided to store data') + throw errcode(new Error('multiaddrs must be provided'), ERR_INVALID_PARAMETERS) } - // create MultiaddrInfo for each address - const multiaddrInfos = [] - addresses.forEach((addr) => { + // create Address for each address + const addresses = [] + multiaddrs.forEach((addr) => { if (!multiaddr.isMultiaddr(addr)) { log.error(`multiaddr ${addr} must be an instance of multiaddr`) throw errcode(new Error(`multiaddr ${addr} must be an instance of multiaddr`), ERR_INVALID_PARAMETERS) } - multiaddrInfos.push({ + addresses.push({ multiaddr: addr }) }) - return multiaddrInfos + return addresses } /** @@ -189,13 +189,13 @@ class AddressBook extends Book { return undefined } - return record.map((multiaddrInfo) => { - const addr = multiaddrInfo.multiaddr + return record.map((address) => { + const multiaddr = address.multiaddr - const idString = addr.getPeerId() - if (idString && idString === peerId.toB58String()) return addr + const idString = multiaddr.getPeerId() + if (idString && idString === peerId.toB58String()) return multiaddr - return addr.encapsulate(`/p2p/${peerId.toB58String()}`) + return multiaddr.encapsulate(`/p2p/${peerId.toB58String()}`) }) } } diff --git a/src/peer-store/index.js b/src/peer-store/index.js index 2f3b25ea26..6888191af9 100644 --- a/src/peer-store/index.js +++ b/src/peer-store/index.js @@ -23,10 +23,10 @@ const { */ class PeerStore extends EventEmitter { /** - * PeerData object - * @typedef {Object} PeerData + * Peer object + * @typedef {Object} Peer * @property {PeerId} id peer's peer-id instance. - * @property {Array} multiaddrsInfos peer's information of the multiaddrs. + * @property {Array
} addresses peer's addresses containing its multiaddrs and metadata. * @property {Array} protocols peer's supported protocols. */ @@ -34,7 +34,7 @@ class PeerStore extends EventEmitter { super() /** - * AddressBook containing a map of peerIdStr to multiaddrsInfo + * AddressBook containing a map of peerIdStr to Address */ this.addressBook = new AddressBook(this) @@ -53,17 +53,17 @@ class PeerStore extends EventEmitter { /** * Get all the stored information of every peer. - * @returns {Map} + * @returns {Map} */ get peers () { const peersData = new Map() // AddressBook - for (const [idStr, multiaddrInfos] of this.addressBook.data.entries()) { + for (const [idStr, addresses] of this.addressBook.data.entries()) { const id = PeerId.createFromCID(idStr) peersData.set(idStr, { id, - multiaddrInfos, + addresses, protocols: this.protoBook.get(id) || [] }) } @@ -75,7 +75,7 @@ class PeerStore extends EventEmitter { if (!pData) { peersData.set(idStr, { id: PeerId.createFromCID(idStr), - multiaddrInfos: [], + addresses: [], protocols: Array.from(protocols) }) } @@ -98,7 +98,7 @@ class PeerStore extends EventEmitter { /** * Get the stored information of a given peer. * @param {PeerId} peerId - * @returns {PeerData} + * @returns {Peer} */ get (peerId) { if (!PeerId.isPeerId(peerId)) { @@ -106,16 +106,16 @@ class PeerStore extends EventEmitter { } const id = this.peerIds.get(peerId.toB58String()) - const multiaddrInfos = this.addressBook.get(peerId) + const addresses = this.addressBook.get(peerId) const protocols = this.protoBook.get(peerId) - if (!multiaddrInfos && !protocols) { + if (!addresses && !protocols) { return undefined } return { id: id || peerId, - multiaddrInfos: multiaddrInfos || [], + addresses: addresses || [], protocols: protocols || [] } } diff --git a/test/peer-routing/peer-routing.node.js b/test/peer-routing/peer-routing.node.js index 460ff9c12d..f29ff3f7c6 100644 --- a/test/peer-routing/peer-routing.node.js +++ b/test/peer-routing/peer-routing.node.js @@ -121,9 +121,9 @@ describe('peer-routing', () => { 'X-Chunked-Output', '1' ]) - const peerData = await node.peerRouting.findPeer(peerKey) + const peer = await node.peerRouting.findPeer(peerKey) - expect(peerData.id).to.equal(peerKey) + expect(peer.id).to.equal(peerKey) expect(mockApi.isDone()).to.equal(true) }) diff --git a/test/peer-store/address-book.spec.js b/test/peer-store/address-book.spec.js index fba8d72c64..62e5a487ae 100644 --- a/test/peer-store/address-book.spec.js +++ b/test/peer-store/address-book.spec.js @@ -69,8 +69,8 @@ describe('addressBook', () => { }) ab.set(peerId, supportedMultiaddrs) - const multiaddrInfos = ab.get(peerId) - const multiaddrs = multiaddrInfos.map((mi) => mi.multiaddr) + const addresses = ab.get(peerId) + const multiaddrs = addresses.map((mi) => mi.multiaddr) expect(multiaddrs).to.have.deep.members(supportedMultiaddrs) return defer.promise @@ -95,8 +95,8 @@ describe('addressBook', () => { // set 2 (same content) ab.set(peerId, supportedMultiaddrsB) - const multiaddrInfos = ab.get(peerId) - const multiaddrs = multiaddrInfos.map((mi) => mi.multiaddr) + const addresses = ab.get(peerId) + const multiaddrs = addresses.map((mi) => mi.multiaddr) expect(multiaddrs).to.have.deep.members(supportedMultiaddrsB) await defer.promise @@ -177,14 +177,14 @@ describe('addressBook', () => { // Replace ab.set(peerId, supportedMultiaddrsA) - let multiaddrInfos = ab.get(peerId) - let multiaddrs = multiaddrInfos.map((mi) => mi.multiaddr) + let addresses = ab.get(peerId) + let multiaddrs = addresses.map((mi) => 
mi.multiaddr) expect(multiaddrs).to.have.deep.members(supportedMultiaddrsA) // Add ab.add(peerId, supportedMultiaddrsB) - multiaddrInfos = ab.get(peerId) - multiaddrs = multiaddrInfos.map((mi) => mi.multiaddr) + addresses = ab.get(peerId) + multiaddrs = addresses.map((mi) => mi.multiaddr) expect(multiaddrs).to.have.deep.members(finalMultiaddrs) return defer.promise @@ -210,8 +210,8 @@ describe('addressBook', () => { // set 2 (content already existing) ab.add(peerId, supportedMultiaddrsB) - const multiaddrInfos = ab.get(peerId) - const multiaddrs = multiaddrInfos.map((mi) => mi.multiaddr) + const addresses = ab.get(peerId) + const multiaddrs = addresses.map((mi) => mi.multiaddr) expect(multiaddrs).to.have.deep.members(finalMultiaddrs) await defer.promise @@ -261,9 +261,9 @@ describe('addressBook', () => { }) it('returns undefined if no multiaddrs are known for the provided peer', () => { - const multiaddrInfos = ab.get(peerId) + const addresses = ab.get(peerId) - expect(multiaddrInfos).to.not.exist() + expect(addresses).to.not.exist() }) it('returns the multiaddrs stored', () => { @@ -271,8 +271,8 @@ describe('addressBook', () => { ab.set(peerId, supportedMultiaddrs) - const multiaddrInfos = ab.get(peerId) - const multiaddrs = multiaddrInfos.map((mi) => mi.multiaddr) + const addresses = ab.get(peerId) + const multiaddrs = addresses.map((mi) => mi.multiaddr) expect(multiaddrs).to.have.deep.members(supportedMultiaddrs) }) }) @@ -292,9 +292,9 @@ describe('addressBook', () => { }) it('returns undefined if no multiaddrs are known for the provided peer', () => { - const multiaddrInfos = ab.getMultiaddrsForPeer(peerId) + const addresses = ab.getMultiaddrsForPeer(peerId) - expect(multiaddrInfos).to.not.exist() + expect(addresses).to.not.exist() }) it('returns the multiaddrs stored', () => { diff --git a/test/peer-store/peer-store.spec.js b/test/peer-store/peer-store.spec.js index c5568763a1..c4f9fbed39 100644 --- a/test/peer-store/peer-store.spec.js +++ b/test/peer-store/peer-store.spec.js @@ -45,8 +45,8 @@ describe('peer-store', () => { }) it('returns undefined on trying to find a non existant peerId', () => { - const peerInfo = peerStore.get(peerIds[0]) - expect(peerInfo).to.not.exist() + const peer = peerStore.get(peerIds[0]) + expect(peer).to.not.exist() }) }) @@ -102,29 +102,29 @@ describe('peer-store', () => { }) it('gets the stored information of a peer in all its books', () => { - const peerInfo = peerStore.get(peerIds[0]) - expect(peerInfo).to.exist() - expect(peerInfo.protocols).to.have.members([proto1]) + const peer = peerStore.get(peerIds[0]) + expect(peer).to.exist() + expect(peer.protocols).to.have.members([proto1]) - const peerMultiaddrs = peerInfo.multiaddrInfos.map((mi) => mi.multiaddr) + const peerMultiaddrs = peer.addresses.map((mi) => mi.multiaddr) expect(peerMultiaddrs).to.have.members([addr1, addr2]) }) it('gets the stored information of a peer that is not present in all its books', () => { - const peerInfo = peerStore.get(peerIds[2]) - expect(peerInfo).to.exist() - expect(peerInfo.protocols.length).to.eql(0) + const peers = peerStore.get(peerIds[2]) + expect(peers).to.exist() + expect(peers.protocols.length).to.eql(0) - const peerMultiaddrs = peerInfo.multiaddrInfos.map((mi) => mi.multiaddr) + const peerMultiaddrs = peers.addresses.map((mi) => mi.multiaddr) expect(peerMultiaddrs).to.have.members([addr4]) }) it('can find all the peers supporting a protocol', () => { const peerSupporting2 = [] - for (const [, peerInfo] of peerStore.peers.entries()) { - if 
(peerInfo.protocols.includes(proto2)) { - peerSupporting2.push(peerInfo) + for (const [, peer] of peerStore.peers.entries()) { + if (peer.protocols.includes(proto2)) { + peerSupporting2.push(peer) } } @@ -136,11 +136,11 @@ describe('peer-store', () => { it('can find all the peers listening on a given address', () => { const peerListenint4 = [] - for (const [, peerInfo] of peerStore.peers.entries()) { - const multiaddrs = peerInfo.multiaddrInfos.map((mi) => mi.multiaddr) + for (const [, peer] of peerStore.peers.entries()) { + const multiaddrs = peer.addresses.map((mi) => mi.multiaddr) if (multiaddrs.includes(addr4)) { - peerListenint4.push(peerInfo) + peerListenint4.push(peer) } } From 7627f96c4fdf1d539b393259ea023866cf21e150 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Mon, 27 Apr 2020 08:55:04 +0200 Subject: [PATCH 093/102] chore: use kad-dht with renamed peer-store properties --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index ff99244bf0..5b5d3bf8a6 100644 --- a/package.json +++ b/package.json @@ -93,7 +93,7 @@ "libp2p-delegated-peer-routing": "^0.5.0", "libp2p-floodsub": "^0.21.0", "libp2p-gossipsub": "^0.4.0", - "libp2p-kad-dht": "libp2p/js-libp2p-kad-dht#chore/rename-peer-store-properties", + "libp2p-kad-dht": "^0.19.1", "libp2p-mdns": "^0.14.0", "libp2p-mplex": "^0.9.1", "libp2p-secio": "^0.12.1", From a55a4dcd27964932506ae626deae494539fb8a7e Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Sat, 18 Apr 2020 17:06:56 +0200 Subject: [PATCH 094/102] chore: refactor connection manager and registrar --- doc/API.md | 48 ++++++++-- src/circuit/circuit/hop.js | 2 +- src/circuit/index.js | 3 +- src/connection-manager/index.js | 122 ++++++++++++++++++++++---- src/identify/index.js | 28 ++++-- src/index.js | 65 ++++++-------- src/metrics/index.js | 5 ++ src/registrar.js | 110 ++++------------------- test/connection-manager/index.node.js | 88 +++++++++++++++++++ test/connection-manager/index.spec.js | 16 ++-- test/dialing/direct.node.js | 8 +- test/dialing/relay.node.js | 6 +- test/identify/index.spec.js | 79 +++++++++-------- test/metrics/index.spec.js | 17 ++-- test/registrar/registrar.node.js | 72 --------------- test/registrar/registrar.spec.js | 101 ++++++++------------- test/registrar/utils.js | 51 ----------- test/upgrading/upgrader.spec.js | 14 +-- 18 files changed, 424 insertions(+), 411 deletions(-) create mode 100644 test/connection-manager/index.node.js delete mode 100644 test/registrar/registrar.node.js delete mode 100644 test/registrar/utils.js diff --git a/doc/API.md b/doc/API.md index 75c0af683f..50c0efcaef 100644 --- a/doc/API.md +++ b/doc/API.md @@ -11,12 +11,12 @@ * [`handle`](#handle) * [`unhandle`](#unhandle) * [`ping`](#ping) - * [`peerRouting.findPeer`](#peerroutingfindpeer) * [`contentRouting.findProviders`](#contentroutingfindproviders) * [`contentRouting.provide`](#contentroutingprovide) * [`contentRouting.put`](#contentroutingput) * [`contentRouting.get`](#contentroutingget) * [`contentRouting.getMany`](#contentroutinggetmany) + * [`peerRouting.findPeer`](#peerroutingfindpeer) * [`peerStore.addressBook.add`](#peerstoreaddressbookadd) * [`peerStore.addressBook.delete`](#peerstoreaddressbookdelete) * [`peerStore.addressBook.get`](#peerstoreaddressbookget) @@ -34,7 +34,9 @@ * [`pubsub.publish`](#pubsubpublish) * [`pubsub.subscribe`](#pubsubsubscribe) * [`pubsub.unsubscribe`](#pubsubunsubscribe) + * [`connectionManager.get`](#connectionmanagerget) * 
[`connectionManager.setPeerValue`](#connectionmanagersetpeervalue) + * [`connectionManager.size`](#connectionmanagersize) * [`metrics.global`](#metricsglobal) * [`metrics.peers`](#metricspeers) * [`metrics.protocols`](#metricsprotocols) @@ -42,6 +44,7 @@ * [`metrics.forProtocol`](#metricsforprotocol) * [Events](#events) * [`libp2p`](#libp2p) + * [`libp2p.connectionManager`](#libp2pconnectionmanager) * [`libp2p.peerStore`](#libp2ppeerStore) * [Types](#types) * [`Stats`](#stats) @@ -999,6 +1002,28 @@ const handler = (msg) => { libp2p.pubsub.unsubscribe(topic, handler) ``` +### connectionManager.get + +Get a connection with a given peer, if it exists. + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| peerId | [`PeerId`][peer-id] | The peer to find | + +#### Returns + +| Type | Description | +|------|-------------| +| [`Connection`][connection] | Connection with the given peer | + +#### Example + +```js +libp2p.connectionManager.get(peerId) +``` + ### connectionManager.setPeerValue Enables users to change the value of certain peers in a range of 0 to 1. Peers with the lowest values will have their Connections pruned first, if any Connection Manager limits are exceeded. See [./CONFIGURATION.md#configuring-connection-manager](./CONFIGURATION.md#configuring-connection-manager) for details on how to configure these limits. @@ -1025,6 +1050,17 @@ libp2p.connectionManager.setPeerValue(highPriorityPeerId, 1) libp2p.connectionManager.setPeerValue(lowPriorityPeerId, 0) ``` +### connectionManager.size + +Getter for obtaining the current number of open connections. + +#### Example + +```js +libp2p.connectionManager.size +// 10 +``` + ### metrics.global A [`Stats`](#stats) object of tracking the global bandwidth of the libp2p node. @@ -1126,21 +1162,23 @@ unless they are performing a specific action. See [peer discovery and auto dial] - `peer`: instance of [`PeerId`][peer-id] +### libp2p.connectionManager + #### A new connection to a peer has been opened This event will be triggered anytime a new Connection is established to another peer. -`libp2p.on('peer:connect', (peer) => {})` +`libp2p.on('peer:connect', (connection) => {})` -- `peer`: instance of [`PeerId`][peer-id] +- `connection`: instance of [`Connection`][connection] #### An existing connection to a peer has been closed This event will be triggered anytime we are disconnected from another peer, regardless of the circumstances of that disconnection. If we happen to have multiple connections to a peer, this event will **only** be triggered when the last connection is closed. 
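For quick reference, a usage sketch of the reworked lifecycle events documented in this section, following the handler signatures shown here; the `libp2p` instance is assumed to exist, as in the other examples in this file:

```js
// Sketch: connection lifecycle listeners after this change.
libp2p.on('peer:connect', (connection) => {
  // The full Connection is provided; the peer is available on it
  console.log('connected to', connection.remotePeer.toB58String())
})

libp2p.on('peer:disconnect', (connection) => {
  // Only fired once the last connection to that peer has closed
  console.log('disconnected from', connection.remotePeer.toB58String())
})
```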
-`libp2p.on('peer:disconnect', (peer) => {})` +`libp2p.on('peer:disconnect', (connection) => {})` -- `peer`: instance of [`PeerId`][peer-id] +- `connection`: instance of [`Connection`][connection] ### libp2p.peerStore diff --git a/src/circuit/circuit/hop.js b/src/circuit/circuit/hop.js index 0a5e71c8ba..c8ac0fddb0 100644 --- a/src/circuit/circuit/hop.js +++ b/src/circuit/circuit/hop.js @@ -41,7 +41,7 @@ module.exports.handleHop = async function handleHop ({ // Get the connection to the destination (stop) peer const destinationPeer = new PeerId(request.dstPeer.id) - const destinationConnection = circuit._registrar.getConnection(destinationPeer) + const destinationConnection = circuit._connectionManager.get(destinationPeer) if (!destinationConnection && !circuit._options.hop.active) { log('HOP request received but we are not connected to the destination peer') return streamHandler.end({ diff --git a/src/circuit/index.js b/src/circuit/index.js index be8326aaa7..c833f124ea 100644 --- a/src/circuit/index.js +++ b/src/circuit/index.js @@ -29,6 +29,7 @@ class Circuit { constructor ({ libp2p, upgrader }) { this._dialer = libp2p.dialer this._registrar = libp2p.registrar + this._connectionManager = libp2p.connectionManager this._upgrader = upgrader this._options = libp2p._config.relay this.addresses = libp2p.addresses @@ -107,7 +108,7 @@ class Circuit { const destinationPeer = PeerId.createFromCID(destinationAddr.getPeerId()) let disconnectOnFailure = false - let relayConnection = this._registrar.getConnection(relayPeer) + let relayConnection = this._connectionManager.get(relayPeer) if (!relayConnection) { relayConnection = await this._dialer.connectToPeer(relayAddr, options) disconnectOnFailure = true diff --git a/src/connection-manager/index.js b/src/connection-manager/index.js index 0868226f56..d6b50324ca 100644 --- a/src/connection-manager/index.js +++ b/src/connection-manager/index.js @@ -6,6 +6,11 @@ const LatencyMonitor = require('latency-monitor').default const debug = require('debug')('libp2p:connection-manager') const retimer = require('retimer') +const { EventEmitter } = require('events') + +const PeerId = require('peer-id') +const { Connection } = require('libp2p-interfaces/src/connection') + const { ERR_INVALID_PARAMETERS } = require('../errors') @@ -22,7 +27,12 @@ const defaultOptions = { defaultPeerValue: 1 } -class ConnectionManager { +/** + * Responsible for managing known connections. + * @fires ConnectionManager#peer:connect Emitted when a new peer is connected. + * @fires ConnectionManager#peer:disconnect Emitted when a peer is disconnected. + */ +class ConnectionManager extends EventEmitter { /** * @constructor * @param {Libp2p} libp2p @@ -38,9 +48,11 @@ class ConnectionManager { * @param {Number} options.defaultPeerValue The value of the peer. Default=1 */ constructor (libp2p, options) { + super() + this._libp2p = libp2p - this._registrar = libp2p.registrar this._peerId = libp2p.peerId.toB58String() + this._options = mergeOptions.call({ ignoreUndefined: true }, defaultOptions, options) if (this._options.maxConnections < this._options.minConnections) { throw errcode(new Error('Connection Manager maxConnections must be greater than minConnections'), ERR_INVALID_PARAMETERS) @@ -48,20 +60,38 @@ class ConnectionManager { debug('options: %j', this._options) - this._metrics = libp2p.metrics + this._libp2p = libp2p + /** + * Map of peer identifiers to their peer value for pruning connections. 
+ * @type {Map} + */ this._peerValues = new Map() - this._connections = new Map() + + /** + * Map of connections per peer + * @type {Map>} + */ + this.connections = new Map() + this._timer = null this._checkMetrics = this._checkMetrics.bind(this) } + /** + * Get current number of open connections. + */ + get size () { + return Array.from(this.connections.values()) + .reduce((accumulator, value) => accumulator + value.length, 0) + } + /** * Starts the Connection Manager. If Metrics are not enabled on libp2p * only event loop and connection limits will be monitored. */ start () { - if (this._metrics) { + if (this._libp2p.metrics) { this._timer = this._timer || retimer(this._checkMetrics, this._options.pollInterval) } @@ -77,13 +107,33 @@ class ConnectionManager { /** * Stops the Connection Manager + * @async */ - stop () { + async stop () { this._timer && this._timer.clear() this._latencyMonitor && this._latencyMonitor.removeListener('data', this._onLatencyMeasure) + + await this._close() debug('stopped') } + /** + * Cleans up the connections + * @async + */ + async _close () { + // Close all connections we're tracking + const tasks = [] + for (const connectionList of this.connections.values()) { + for (const connection of connectionList) { + tasks.push(connection.close()) + } + } + + await tasks + this.connections.clear() + } + /** * Sets the value of the given peer. Peers with lower values * will be disconnected first. @@ -106,7 +156,7 @@ class ConnectionManager { * @private */ _checkMetrics () { - const movingAverages = this._metrics.global.movingAverages + const movingAverages = this._libp2p.metrics.global.movingAverages const received = movingAverages.dataReceived[this._options.movingAverageInterval].movingAverage() this._checkLimit('maxReceivedData', received) const sent = movingAverages.dataSent[this._options.movingAverageInterval].movingAverage() @@ -122,12 +172,25 @@ class ConnectionManager { * @param {Connection} connection */ onConnect (connection) { + if (!Connection.isConnection(connection)) { + throw errcode(new Error('conn must be an instance of interface-connection'), ERR_INVALID_PARAMETERS) + } + const peerId = connection.remotePeer.toB58String() - this._connections.set(connection.id, connection) + const storedConn = this.connections.get(peerId) + + if (storedConn) { + storedConn.push(connection) + } else { + this.connections.set(peerId, [connection]) + this.emit('peer:connect', connection) + } + if (!this._peerValues.has(peerId)) { this._peerValues.set(peerId, this._options.defaultPeerValue) } - this._checkLimit('maxConnections', this._connections.size) + + this._checkLimit('maxConnections', this.size) } /** @@ -135,8 +198,37 @@ class ConnectionManager { * @param {Connection} connection */ onDisconnect (connection) { - this._connections.delete(connection.id) - this._peerValues.delete(connection.remotePeer.toB58String()) + const peerId = connection.remotePeer.toB58String() + let storedConn = this.connections.get(peerId) + + if (storedConn && storedConn.length > 1) { + storedConn = storedConn.filter((conn) => conn.id !== connection.id) + this.connections.set(peerId, storedConn) + } else if (storedConn) { + this.connections.delete(peerId) + this._peerValues.delete(connection.remotePeer.toB58String()) + this.emit('peer:disconnect', connection) + } + } + + /** + * Get a connection with a peer. 
+ * @param {PeerId} peerId + * @returns {Connection} + */ + get (peerId) { + if (!PeerId.isPeerId(peerId)) { + throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS) + } + + const id = peerId.toB58String() + const connections = this.connections.get(id) + + // Return the first, open connection + if (connections) { + return connections.find(connection => connection.stat.status === 'open') + } + return null } /** @@ -169,7 +261,7 @@ class ConnectionManager { * @private */ _maybeDisconnectOne () { - if (this._options.minConnections < this._connections.size) { + if (this._options.minConnections < this.connections.size) { const peerValues = Array.from(this._peerValues).sort(byPeerValue) debug('%s: sorted peer values: %j', this._peerId, peerValues) const disconnectPeer = peerValues[0] @@ -177,9 +269,9 @@ class ConnectionManager { const peerId = disconnectPeer[0] debug('%s: lowest value peer is %s', this._peerId, peerId) debug('%s: closing a connection to %j', this._peerId, peerId) - for (const connection of this._connections.values()) { - if (connection.remotePeer.toB58String() === peerId) { - connection.close() + for (const connections of this.connections.values()) { + if (connections[0].remotePeer.toB58String() === peerId) { + connections[0].close() break } } diff --git a/src/identify/index.js b/src/identify/index.js index 6fe8732fd6..d8ea46e857 100644 --- a/src/identify/index.js +++ b/src/identify/index.js @@ -45,16 +45,28 @@ class IdentifyService { /** * @constructor * @param {object} options - * @param {Registrar} options.registrar + * @param {PeerStore} options.peerStore + * @param {ConnectionManager} options.connectionManager * @param {Map} options.protocols A reference to the protocols we support * @param {PeerId} options.peerId The peer running the identify service * @param {{ listen: Array}} options.addresses The peer addresses */ constructor (options) { /** - * @property {Registrar} + * @property {PeerStore} */ - this.registrar = options.registrar + this.peerStore = options.peerStore + + /** + * @property {ConnectionManager} + */ + this.connectionManager = options.connectionManager + this.connectionManager.on('peer:connect', (connection) => { + const peerId = connection.remotePeer + + this.identify(connection, peerId).catch(log.error) + }) + /** * @property {PeerId} */ @@ -103,7 +115,7 @@ class IdentifyService { const connections = [] let connection for (const peer of peerStore.peers.values()) { - if (peer.protocols.includes(MULTICODEC_IDENTIFY_PUSH) && (connection = this.registrar.getConnection(peer.id))) { + if (peer.protocols.includes(MULTICODEC_IDENTIFY_PUSH) && (connection = this.connectionManager.get(peer.id))) { connections.push(connection) } } @@ -159,8 +171,8 @@ class IdentifyService { observedAddr = IdentifyService.getCleanMultiaddr(observedAddr) // Update peers data in PeerStore - this.registrar.peerStore.addressBook.set(id, listenAddrs.map((addr) => multiaddr(addr))) - this.registrar.peerStore.protoBook.set(id, protocols) + this.peerStore.addressBook.set(id, listenAddrs.map((addr) => multiaddr(addr))) + this.peerStore.protoBook.set(id, protocols) // TODO: Track our observed address so that we can score it log('received observed address of %s', observedAddr) @@ -244,13 +256,13 @@ class IdentifyService { // Update peers data in PeerStore const id = connection.remotePeer try { - this.registrar.peerStore.addressBook.set(id, message.listenAddrs.map((addr) => multiaddr(addr))) + this.peerStore.addressBook.set(id, message.listenAddrs.map((addr) 
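The listener added above is the behavioural core of this refactor: identify is no longer triggered from the upgrader callbacks but from the connection manager's `peer:connect` event. A rough sketch of that wiring, modelled on the test setup later in this patch; `IdentifyService`, `peerId`, `peerStore` and `connection` are assumed to be constructed elsewhere, and a bare `EventEmitter` stands in for the real connection manager:

```js
const { EventEmitter } = require('events')

// Anything that emits 'peer:connect' with a Connection will do here
const connectionManager = new EventEmitter()

const identify = new IdentifyService({
  peerId,
  addresses: { listen: [] },
  protocols: new Map(),
  connectionManager,
  peerStore
})

// Emitting the event is enough to trigger identify for that connection;
// keep the `identify` reference around so its listener stays registered.
connectionManager.emit('peer:connect', connection)
```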
=> multiaddr(addr))) } catch (err) { return log.error('received invalid listen addrs', err) } // Update the protocols - this.registrar.peerStore.protoBook.set(id, message.protocols) + this.peerStore.protoBook.set(id, message.protocols) } } diff --git a/src/index.js b/src/index.js index c0b7fded57..b32c2f7d0c 100644 --- a/src/index.js +++ b/src/index.js @@ -53,55 +53,40 @@ class Libp2p extends EventEmitter { this._transport = [] // Transport instances/references this._discovery = new Map() // Discovery service instances/references + // Create the Connection Manager + this.connectionManager = new ConnectionManager(this, this._options.connectionManager) + + // Create Metrics if (this._options.metrics.enabled) { - this.metrics = new Metrics(this._options.metrics) + this.metrics = new Metrics({ + ...this._options.metrics, + connectionManager: this.connectionManager + }) } // Setup the Upgrader this.upgrader = new Upgrader({ localPeer: this.peerId, metrics: this.metrics, - onConnection: (connection) => { - const peerId = connection.remotePeer - - this.registrar.onConnect(peerId, connection) - this.connectionManager.onConnect(connection) - this.emit('peer:connect', peerId) - - // Run identify for every connection - if (this.identifyService) { - this.identifyService.identify(connection, peerId) - .catch(log.error) - } - }, - onConnectionEnd: (connection) => { - const peerId = connection.remotePeer - - this.registrar.onDisconnect(peerId, connection) - this.connectionManager.onDisconnect(connection) - - // If there are no connections to the peer, disconnect - if (!this.registrar.getConnection(peerId)) { - this.emit('peer:disconnect', peerId) - this.metrics && this.metrics.onPeerDisconnected(peerId) - } - } + onConnection: (connection) => this.connectionManager.onConnect(connection), + onConnectionEnd: (connection) => this.connectionManager.onDisconnect(connection) }) - // Create the Registrar - this.registrar = new Registrar({ peerStore: this.peerStore }) - this.handle = this.handle.bind(this) - this.registrar.handle = this.handle - - // Create the Connection Manager - this.connectionManager = new ConnectionManager(this, this._options.connectionManager) - // Setup the transport manager this.transportManager = new TransportManager({ libp2p: this, upgrader: this.upgrader }) + // Create the Registrar + this.registrar = new Registrar({ + peerStore: this.peerStore, + connectionManager: this.connectionManager + }) + + this.handle = this.handle.bind(this) + this.registrar.handle = this.handle + // Attach crypto channels if (this._modules.connEncryption) { const cryptos = this._modules.connEncryption @@ -137,7 +122,8 @@ class Libp2p extends EventEmitter { // Add the identify service since we can multiplex this.identifyService = new IdentifyService({ - registrar: this.registrar, + peerStore: this.peerStore, + connectionManager: this.connectionManager, peerId: this.peerId, addresses: this.addresses, protocols: this.upgrader.protocols @@ -239,7 +225,6 @@ class Libp2p extends EventEmitter { ]) await this.transportManager.close() - await this.registrar.close() ping.unmount(this) this.dialer.destroy() @@ -291,7 +276,7 @@ class Libp2p extends EventEmitter { */ async dialProtocol (peer, protocols, options) { const { id, multiaddrs } = getPeer(peer, this.peerStore) - let connection = this.registrar.getConnection(id) + let connection = this.connectionManager.get(id) if (!connection) { connection = await this.dialer.connectToPeer(peer, options) @@ -316,7 +301,7 @@ class Libp2p extends EventEmitter { const { id } = 
getPeer(peer) return Promise.all( - this.registrar.connections.get(id.toB58String()).map(connection => { + this.connectionManager.connections.get(id.toB58String()).map(connection => { return connection.close() }) ) @@ -443,9 +428,9 @@ class Libp2p extends EventEmitter { */ async _maybeConnect (peerId) { // If auto dialing is on and we have no connection to the peer, check if we should dial - if (this._config.peerDiscovery.autoDial === true && !this.registrar.getConnection(peerId)) { + if (this._config.peerDiscovery.autoDial === true && !this.connectionManager.get(peerId)) { const minPeers = this._options.connectionManager.minPeers || 0 - if (minPeers > this.connectionManager._connections.size) { + if (minPeers > this.connectionManager.size) { log('connecting to discovered peer %s', peerId.toB58String()) try { await this.dialer.connectToPeer(peerId) diff --git a/src/metrics/index.js b/src/metrics/index.js index e687f87b4f..0a92c036c1 100644 --- a/src/metrics/index.js +++ b/src/metrics/index.js @@ -21,6 +21,7 @@ class Metrics { /** * * @param {object} options + * @param {ConnectionManager} options.connectionManager * @param {number} options.computeThrottleMaxQueueSize * @param {number} options.computeThrottleTimeout * @param {Array} options.movingAverageIntervals @@ -34,6 +35,10 @@ class Metrics { this._oldPeers = oldPeerLRU(this._options.maxOldPeersRetention) this._running = false this._onMessage = this._onMessage.bind(this) + this._connectionManager = options.connectionManager + this._connectionManager.on('peer:disconnect', (connection) => { + this.onPeerDisconnected(connection.remotePeer) + }) } /** diff --git a/src/registrar.js b/src/registrar.js index fbe7acb532..6aa601f003 100644 --- a/src/registrar.js +++ b/src/registrar.js @@ -5,13 +5,10 @@ const errcode = require('err-code') const log = debug('libp2p:peer-store') log.error = debug('libp2p:peer-store:error') -const PeerId = require('peer-id') - const { ERR_INVALID_PARAMETERS } = require('./errors') const Topology = require('libp2p-interfaces/src/topology') -const { Connection } = require('libp2p-interfaces/src/connection') /** * Responsible for notifying registered protocols of events in the network. 
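The registrar hunks that follow drop its own connection bookkeeping; it keeps only the topology map and delegates connection lookups and disconnect notifications to the connection manager. A small sketch of how callers look up a connection after this change, assuming a started `libp2p` node and a known `remotePeerId` (see also the circuit and test changes elsewhere in this patch):

```js
// Preferred lookup after this refactor
const connection = libp2p.connectionManager.get(remotePeerId)

// Still works, but simply proxies to the connection manager
const sameConnection = libp2p.registrar.getConnection(remotePeerId)

if (connection) {
  console.log('open connection to', connection.remotePeer.toB58String())
}
```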
@@ -20,18 +17,14 @@ class Registrar { /** * @param {Object} props * @param {PeerStore} props.peerStore + * @param {connectionManager} props.connectionManager * @constructor */ - constructor ({ peerStore }) { + constructor ({ peerStore, connectionManager }) { // Used on topology to listen for protocol changes this.peerStore = peerStore - /** - * Map of connections per peer - * TODO: this should be handled by connectionManager - * @type {Map>} - */ - this.connections = new Map() + this.connectionManager = connectionManager /** * Map of topologies @@ -41,6 +34,9 @@ class Registrar { this.topologies = new Map() this._handle = undefined + + this._onDisconnect = this._onDisconnect.bind(this) + this.connectionManager.on('peer:disconnect', this._onDisconnect) } get handle () { @@ -51,93 +47,13 @@ class Registrar { this._handle = handle } - /** - * Cleans up the registrar - * @async - */ - async close () { - // Close all connections we're tracking - const tasks = [] - for (const connectionList of this.connections.values()) { - for (const connection of connectionList) { - tasks.push(connection.close()) - } - } - - await tasks - this.connections.clear() - } - - /** - * Add a new connected peer to the record - * TODO: this should live in the ConnectionManager - * @param {PeerId} peerId - * @param {Connection} conn - * @returns {void} - */ - onConnect (peerId, conn) { - if (!PeerId.isPeerId(peerId)) { - throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS) - } - - if (!Connection.isConnection(conn)) { - throw errcode(new Error('conn must be an instance of interface-connection'), ERR_INVALID_PARAMETERS) - } - - const id = peerId.toB58String() - const storedConn = this.connections.get(id) - - if (storedConn) { - storedConn.push(conn) - } else { - this.connections.set(id, [conn]) - } - } - - /** - * Remove a disconnected peer from the record - * TODO: this should live in the ConnectionManager - * @param {PeerId} peerId - * @param {Connection} connection - * @param {Error} [error] - * @returns {void} - */ - onDisconnect (peerId, connection, error) { - if (!PeerId.isPeerId(peerId)) { - throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS) - } - - const id = peerId.toB58String() - let storedConn = this.connections.get(id) - - if (storedConn && storedConn.length > 1) { - storedConn = storedConn.filter((conn) => conn.id !== connection.id) - this.connections.set(id, storedConn) - } else if (storedConn) { - for (const [, topology] of this.topologies) { - topology.disconnect(peerId, error) - } - - this.connections.delete(id) - } - } - /** * Get a connection with a peer. 
* @param {PeerId} peerId * @returns {Connection} */ getConnection (peerId) { - if (!PeerId.isPeerId(peerId)) { - throw errcode(new Error('peerId must be an instance of peer-id'), ERR_INVALID_PARAMETERS) - } - - const connections = this.connections.get(peerId.toB58String()) - // Return the first, open connection - if (connections) { - return connections.find(connection => connection.stat.status === 'open') - } - return null + return this.connectionManager.get(peerId) } /** @@ -169,6 +85,18 @@ class Registrar { unregister (id) { return this.topologies.delete(id) } + + /** + * Remove a disconnected peer from the record + * @param {Connection} connection + * @param {Error} [error] + * @returns {void} + */ + _onDisconnect (connection, error) { + for (const [, topology] of this.topologies) { + topology.disconnect(connection.remotePeer, error) + } + } } module.exports = Registrar diff --git a/test/connection-manager/index.node.js b/test/connection-manager/index.node.js new file mode 100644 index 0000000000..bec446357f --- /dev/null +++ b/test/connection-manager/index.node.js @@ -0,0 +1,88 @@ +'use strict' +/* eslint-env mocha */ + +const chai = require('chai') +chai.use(require('dirty-chai')) +chai.use(require('chai-as-promised')) +const { expect } = chai +const sinon = require('sinon') + +const multiaddr = require('multiaddr') + +const peerUtils = require('../utils/creators/peer') +const mockConnection = require('../utils/mockConnection') +const baseOptions = require('../utils/base-options.browser') + +const listenMultiaddr = multiaddr('/ip4/127.0.0.1/tcp/15002/ws') + +describe('Connection Manager', () => { + let libp2p + + beforeEach(async () => { + [libp2p] = await peerUtils.createPeer({ + config: { + addresses: { + listen: [listenMultiaddr] + }, + modules: baseOptions.modules + } + }) + }) + + afterEach(() => libp2p.stop()) + + it('should filter connections on disconnect, removing the closed one', async () => { + const [localPeer, remotePeer] = await peerUtils.createPeerId({ number: 2 }) + + const conn1 = await mockConnection({ localPeer, remotePeer }) + const conn2 = await mockConnection({ localPeer, remotePeer }) + + const id = remotePeer.toB58String() + + // Add connection to the connectionManager + libp2p.connectionManager.onConnect(conn1) + libp2p.connectionManager.onConnect(conn2) + + expect(libp2p.connectionManager.connections.get(id).length).to.eql(2) + + conn2._stat.status = 'closed' + libp2p.connectionManager.onDisconnect(conn2) + + const peerConnections = libp2p.connectionManager.connections.get(id) + expect(peerConnections.length).to.eql(1) + expect(peerConnections[0]._stat.status).to.eql('open') + }) + + it('should add connection on dial and remove on node stop', async () => { + const [remoteLibp2p] = await peerUtils.createPeer({ + config: { + addresses: { + listen: [multiaddr('/ip4/127.0.0.1/tcp/15003/ws')] + }, + modules: baseOptions.modules + } + }) + + // Spy on emit for easy verification + sinon.spy(libp2p.connectionManager, 'emit') + sinon.spy(remoteLibp2p.connectionManager, 'emit') + + libp2p.peerStore.addressBook.set(remoteLibp2p.peerId, remoteLibp2p.addresses.listen) + await libp2p.dial(remoteLibp2p.peerId) + + // check connect event + expect(libp2p.connectionManager.emit.callCount).to.equal(1) + const [event, connection] = libp2p.connectionManager.emit.getCall(0).args + expect(event).to.equal('peer:connect') + expect(connection.remotePeer.isEqual(remoteLibp2p.peerId)).to.equal(true) + + const libp2pConn = libp2p.connectionManager.get(remoteLibp2p.peerId) + 
expect(libp2pConn).to.exist() + + const remoteConn = remoteLibp2p.connectionManager.get(libp2p.peerId) + expect(remoteConn).to.exist() + + await remoteLibp2p.stop() + expect(remoteLibp2p.connectionManager.size).to.eql(0) + }) +}) diff --git a/test/connection-manager/index.spec.js b/test/connection-manager/index.spec.js index ce4c6312dc..8ffb0ee19c 100644 --- a/test/connection-manager/index.spec.js +++ b/test/connection-manager/index.spec.js @@ -7,7 +7,7 @@ chai.use(require('chai-as-promised')) const { expect } = chai const sinon = require('sinon') -const { createPeer } = require('../utils/creators/peer') +const peerUtils = require('../utils/creators/peer') const mockConnection = require('../utils/mockConnection') const baseOptions = require('../utils/base-options.browser') @@ -20,7 +20,7 @@ describe('Connection Manager', () => { }) it('should be able to create without metrics', async () => { - [libp2p] = await createPeer({ + [libp2p] = await peerUtils.createPeer({ config: { modules: baseOptions.modules }, @@ -35,7 +35,7 @@ describe('Connection Manager', () => { }) it('should be able to create with metrics', async () => { - [libp2p] = await createPeer({ + [libp2p] = await peerUtils.createPeer({ config: { modules: baseOptions.modules, metrics: { @@ -49,12 +49,12 @@ describe('Connection Manager', () => { await libp2p.start() expect(spy).to.have.property('callCount', 1) - expect(libp2p.connectionManager._metrics).to.exist() + expect(libp2p.connectionManager._libp2p.metrics).to.exist() }) it('should close lowest value peer connection when the maximum has been reached', async () => { const max = 5 - ;[libp2p] = await createPeer({ + ;[libp2p] = await peerUtils.createPeer({ config: { modules: baseOptions.modules, connectionManager: { @@ -92,7 +92,7 @@ describe('Connection Manager', () => { it('should close connection when the maximum has been reached even without peer values', async () => { const max = 5 - ;[libp2p] = await createPeer({ + ;[libp2p] = await peerUtils.createPeer({ config: { modules: baseOptions.modules, connectionManager: { @@ -110,7 +110,7 @@ describe('Connection Manager', () => { const spy = sinon.spy() await Promise.all([...new Array(max + 1)].map(async () => { const connection = await mockConnection() - sinon.stub(connection, 'close').callsFake(() => spy()) + sinon.stub(connection, 'close').callsFake(() => spy()) // eslint-disable-line libp2p.connectionManager.onConnect(connection) })) @@ -119,7 +119,7 @@ describe('Connection Manager', () => { }) it('should fail if the connection manager has mismatched connection limit options', async () => { - await expect(createPeer({ + await expect(peerUtils.createPeer({ config: { modules: baseOptions.modules, connectionManager: { diff --git a/test/dialing/direct.node.js b/test/dialing/direct.node.js index d7c9e6fa02..e3bd7d7be5 100644 --- a/test/dialing/direct.node.js +++ b/test/dialing/direct.node.js @@ -373,8 +373,8 @@ describe('Dialing (direct, TCP)', () => { } // 1 connection, because we know the peer in the multiaddr - expect(libp2p.connectionManager._connections.size).to.equal(1) - expect(remoteLibp2p.connectionManager._connections.size).to.equal(1) + expect(libp2p.connectionManager.size).to.equal(1) + expect(remoteLibp2p.connectionManager.size).to.equal(1) }) it('should coalesce parallel dials to the same error on failure', async () => { @@ -408,8 +408,8 @@ describe('Dialing (direct, TCP)', () => { } // 1 connection, because we know the peer in the multiaddr - expect(libp2p.connectionManager._connections.size).to.equal(0) - 
expect(remoteLibp2p.connectionManager._connections.size).to.equal(0) + expect(libp2p.connectionManager.size).to.equal(0) + expect(remoteLibp2p.connectionManager.size).to.equal(0) }) }) }) diff --git a/test/dialing/relay.node.js b/test/dialing/relay.node.js index 57a5e8ac88..4b706225ec 100644 --- a/test/dialing/relay.node.js +++ b/test/dialing/relay.node.js @@ -120,7 +120,7 @@ describe('Dialing (via relay, TCP)', () => { .and.to.have.nested.property('._errors[0].code', Errors.ERR_HOP_REQUEST_FAILED) // We should not be connected to the relay, because we weren't before the dial - const srcToRelayConn = srcLibp2p.registrar.getConnection(relayLibp2p.peerId) + const srcToRelayConn = srcLibp2p.connectionManager.get(relayLibp2p.peerId) expect(srcToRelayConn).to.not.exist() }) @@ -137,7 +137,7 @@ describe('Dialing (via relay, TCP)', () => { .to.eventually.be.rejectedWith(AggregateError) .and.to.have.nested.property('._errors[0].code', Errors.ERR_HOP_REQUEST_FAILED) - const srcToRelayConn = srcLibp2p.registrar.getConnection(relayLibp2p.peerId) + const srcToRelayConn = srcLibp2p.connectionManager.get(relayLibp2p.peerId) expect(srcToRelayConn).to.exist() expect(srcToRelayConn.stat.status).to.equal('open') }) @@ -163,7 +163,7 @@ describe('Dialing (via relay, TCP)', () => { .to.eventually.be.rejectedWith(AggregateError) .and.to.have.nested.property('._errors[0].code', Errors.ERR_HOP_REQUEST_FAILED) - const dstToRelayConn = dstLibp2p.registrar.getConnection(relayLibp2p.peerId) + const dstToRelayConn = dstLibp2p.connectionManager.get(relayLibp2p.peerId) expect(dstToRelayConn).to.exist() expect(dstToRelayConn.stat.status).to.equal('open') }) diff --git a/test/identify/index.spec.js b/test/identify/index.spec.js index 648000a147..c10bda7b87 100644 --- a/test/identify/index.spec.js +++ b/test/identify/index.spec.js @@ -7,6 +7,7 @@ chai.use(require('chai-as-promised')) const { expect } = chai const sinon = require('sinon') +const { EventEmitter } = require('events') const delay = require('delay') const PeerId = require('peer-id') const duplexPair = require('it-pair/duplex') @@ -48,14 +49,13 @@ describe('Identify', () => { listen: [] }, protocols, - registrar: { - peerStore: { - addressBook: { - set: () => { } - }, - protoBook: { - set: () => { } - } + connectionManager: new EventEmitter(), + peerStore: { + addressBook: { + set: () => { } + }, + protoBook: { + set: () => { } } } }) @@ -64,7 +64,8 @@ describe('Identify', () => { addresses: { listen: [] }, - protocols + protocols, + connectionManager: new EventEmitter() }) const observedAddr = multiaddr('/ip4/127.0.0.1/tcp/1234') @@ -74,8 +75,8 @@ describe('Identify', () => { const [local, remote] = duplexPair() sinon.stub(localConnectionMock, 'newStream').returns({ stream: local, protocol: multicodecs.IDENTIFY }) - sinon.spy(localIdentify.registrar.peerStore.addressBook, 'set') - sinon.spy(localIdentify.registrar.peerStore.protoBook, 'set') + sinon.spy(localIdentify.peerStore.addressBook, 'set') + sinon.spy(localIdentify.peerStore.protoBook, 'set') // Run identify await Promise.all([ @@ -87,10 +88,10 @@ describe('Identify', () => { }) ]) - expect(localIdentify.registrar.peerStore.addressBook.set.callCount).to.equal(1) - expect(localIdentify.registrar.peerStore.protoBook.set.callCount).to.equal(1) + expect(localIdentify.peerStore.addressBook.set.callCount).to.equal(1) + expect(localIdentify.peerStore.protoBook.set.callCount).to.equal(1) // Validate the remote peer gets updated in the peer store - const call = 
localIdentify.registrar.peerStore.addressBook.set.firstCall + const call = localIdentify.peerStore.addressBook.set.firstCall expect(call.args[0].id.bytes).to.equal(remotePeer.bytes) }) @@ -101,14 +102,13 @@ describe('Identify', () => { listen: [] }, protocols, - registrar: { - peerStore: { - addressBook: { - set: () => { } - }, - protoBook: { - set: () => { } - } + connectionManager: new EventEmitter(), + peerStore: { + addressBook: { + set: () => { } + }, + protoBook: { + set: () => { } } } }) @@ -117,7 +117,8 @@ describe('Identify', () => { addresses: { listen: [] }, - protocols + protocols, + connectionManager: new EventEmitter() }) const observedAddr = multiaddr('/ip4/127.0.0.1/tcp/1234') @@ -145,12 +146,15 @@ describe('Identify', () => { describe('push', () => { it('should be able to push identify updates to another peer', async () => { const listeningAddr = multiaddr('/ip4/127.0.0.1/tcp/1234') + const connectionManager = new EventEmitter() + connectionManager.getConnection = () => {} + const localIdentify = new IdentifyService({ peerId: localPeer, addresses: { listen: [listeningAddr] }, - registrar: { getConnection: () => {} }, + connectionManager, protocols: new Map([ [multicodecs.IDENTIFY], [multicodecs.IDENTIFY_PUSH], @@ -162,14 +166,13 @@ describe('Identify', () => { addresses: { listen: [] }, - registrar: { - peerStore: { - addressBook: { - set: () => {} - }, - protoBook: { - set: () => { } - } + connectionManager, + peerStore: { + addressBook: { + set: () => { } + }, + protoBook: { + set: () => { } } } }) @@ -182,8 +185,8 @@ describe('Identify', () => { const [local, remote] = duplexPair() sinon.stub(localConnectionMock, 'newStream').returns({ stream: local, protocol: multicodecs.IDENTIFY_PUSH }) - sinon.spy(remoteIdentify.registrar.peerStore.addressBook, 'set') - sinon.spy(remoteIdentify.registrar.peerStore.protoBook, 'set') + sinon.spy(remoteIdentify.peerStore.addressBook, 'set') + sinon.spy(remoteIdentify.peerStore.protoBook, 'set') // Run identify await Promise.all([ @@ -195,12 +198,12 @@ describe('Identify', () => { }) ]) - expect(remoteIdentify.registrar.peerStore.addressBook.set.callCount).to.equal(1) - expect(remoteIdentify.registrar.peerStore.protoBook.set.callCount).to.equal(1) - const [peerId, multiaddrs] = remoteIdentify.registrar.peerStore.addressBook.set.firstCall.args + expect(remoteIdentify.peerStore.addressBook.set.callCount).to.equal(1) + expect(remoteIdentify.peerStore.protoBook.set.callCount).to.equal(1) + const [peerId, multiaddrs] = remoteIdentify.peerStore.addressBook.set.firstCall.args expect(peerId.bytes).to.eql(localPeer.bytes) expect(multiaddrs).to.eql([listeningAddr]) - const [peerId2, protocols] = remoteIdentify.registrar.peerStore.protoBook.set.firstCall.args + const [peerId2, protocols] = remoteIdentify.peerStore.protoBook.set.firstCall.args expect(peerId2.bytes).to.eql(localPeer.bytes) expect(protocols).to.eql(Array.from(localProtocols)) }) diff --git a/test/metrics/index.spec.js b/test/metrics/index.spec.js index d84250ef4d..da0c10d555 100644 --- a/test/metrics/index.spec.js +++ b/test/metrics/index.spec.js @@ -7,6 +7,8 @@ chai.use(require('chai-as-promised')) const { expect } = chai const sinon = require('sinon') +const { EventEmitter } = require('events') + const { randomBytes } = require('libp2p-crypto') const duplexPair = require('it-pair/duplex') const pipe = require('it-pipe') @@ -35,7 +37,8 @@ describe('Metrics', () => { const [local, remote] = duplexPair() const metrics = new Metrics({ computeThrottleMaxQueueSize: 1, // compute after 
every message - movingAverageIntervals: [10, 100, 1000] + movingAverageIntervals: [10, 100, 1000], + connectionManager: new EventEmitter() }) metrics.trackStream({ @@ -70,7 +73,8 @@ describe('Metrics', () => { const [local, remote] = duplexPair() const metrics = new Metrics({ computeThrottleMaxQueueSize: 1, // compute after every message - movingAverageIntervals: [10, 100, 1000] + movingAverageIntervals: [10, 100, 1000], + connectionManager: new EventEmitter() }) metrics.trackStream({ @@ -118,7 +122,8 @@ describe('Metrics', () => { const [local2, remote2] = duplexPair() const metrics = new Metrics({ computeThrottleMaxQueueSize: 1, // compute after every message - movingAverageIntervals: [10, 100, 1000] + movingAverageIntervals: [10, 100, 1000], + connectionManager: new EventEmitter() }) const protocol = '/echo/1.0.0' metrics.start() @@ -173,7 +178,8 @@ describe('Metrics', () => { const [local, remote] = duplexPair() const metrics = new Metrics({ computeThrottleMaxQueueSize: 1, // compute after every message - movingAverageIntervals: [10, 100, 1000] + movingAverageIntervals: [10, 100, 1000], + connectionManager: new EventEmitter() }) metrics.start() @@ -228,7 +234,8 @@ describe('Metrics', () => { })) const metrics = new Metrics({ - maxOldPeersRetention: 5 // Only keep track of 5 + maxOldPeersRetention: 5, // Only keep track of 5 + connectionManager: new EventEmitter() }) // Clone so trackedPeers isn't modified diff --git a/test/registrar/registrar.node.js b/test/registrar/registrar.node.js deleted file mode 100644 index f2829d918a..0000000000 --- a/test/registrar/registrar.node.js +++ /dev/null @@ -1,72 +0,0 @@ -'use strict' -/* eslint-env mocha */ - -const chai = require('chai') -chai.use(require('dirty-chai')) -const { expect } = chai -const sinon = require('sinon') - -const mergeOptions = require('merge-options') - -const multiaddr = require('multiaddr') -const Libp2p = require('../../src') - -const baseOptions = require('../utils/base-options') -const peerUtils = require('../utils/creators/peer') -const listenAddr = multiaddr('/ip4/127.0.0.1/tcp/0') - -describe('registrar on dial', () => { - let peerId - let remotePeerId - let libp2p - let remoteLibp2p - let remoteAddr - - before(async () => { - [peerId, remotePeerId] = await peerUtils.createPeerId({ number: 2 }) - remoteLibp2p = new Libp2p(mergeOptions(baseOptions, { - peerId: remotePeerId - })) - - await remoteLibp2p.transportManager.listen([listenAddr]) - remoteAddr = remoteLibp2p.transportManager.getAddrs()[0].encapsulate(`/p2p/${remotePeerId.toB58String()}`) - }) - - after(async () => { - sinon.restore() - await remoteLibp2p.stop() - libp2p && await libp2p.stop() - }) - - it('should inform registrar of a new connection', async () => { - libp2p = new Libp2p(mergeOptions(baseOptions, { - peerId - })) - - sinon.spy(remoteLibp2p.registrar, 'onConnect') - - await libp2p.dial(remoteAddr) - expect(remoteLibp2p.registrar.onConnect.callCount).to.equal(1) - - const libp2pConn = libp2p.registrar.getConnection(remotePeerId) - expect(libp2pConn).to.exist() - - const remoteConn = remoteLibp2p.registrar.getConnection(peerId) - expect(remoteConn).to.exist() - }) - - it('should be closed on libp2p stop', async () => { - libp2p = new Libp2p(mergeOptions(baseOptions, { - peerId - })) - - await libp2p.dial(remoteAddr) - expect(libp2p.connections.size).to.equal(1) - - sinon.spy(libp2p.registrar, 'close') - - await libp2p.stop() - expect(libp2p.registrar.close.callCount).to.equal(1) - expect(libp2p.connections.size).to.equal(0) - }) -}) diff --git 
a/test/registrar/registrar.spec.js b/test/registrar/registrar.spec.js index 696531e98b..7fd57991ac 100644 --- a/test/registrar/registrar.spec.js +++ b/test/registrar/registrar.spec.js @@ -6,21 +6,26 @@ chai.use(require('dirty-chai')) const { expect } = chai const pDefer = require('p-defer') +const { EventEmitter } = require('events') + const Topology = require('libp2p-interfaces/src/topology/multicodec-topology') const PeerStore = require('../../src/peer-store') const Registrar = require('../../src/registrar') -const { createMockConnection } = require('./utils') + +const createMockConnection = require('../utils/mockConnection') const peerUtils = require('../utils/creators/peer') +const baseOptions = require('../utils/base-options.browser') const multicodec = '/test/1.0.0' describe('registrar', () => { - let peerStore, registrar + let peerStore + let registrar describe('errors', () => { beforeEach(() => { peerStore = new PeerStore() - registrar = new Registrar({ peerStore }) + registrar = new Registrar({ peerStore, connectionManager: new EventEmitter() }) }) it('should fail to register a protocol if no multicodec is provided', () => { @@ -36,11 +41,19 @@ describe('registrar', () => { }) describe('registration', () => { - beforeEach(() => { - peerStore = new PeerStore() - registrar = new Registrar({ peerStore }) + let libp2p + + beforeEach(async () => { + [libp2p] = await peerUtils.createPeer({ + config: { + modules: baseOptions.modules + }, + started: false + }) }) + afterEach(() => libp2p.stop()) + it('should be able to register a protocol', () => { const topologyProps = new Topology({ multicodecs: multicodec, @@ -50,7 +63,7 @@ describe('registrar', () => { } }) - const identifier = registrar.register(topologyProps) + const identifier = libp2p.registrar.register(topologyProps) expect(identifier).to.exist() }) @@ -64,14 +77,14 @@ describe('registrar', () => { } }) - const identifier = registrar.register(topologyProps) - const success = registrar.unregister(identifier) + const identifier = libp2p.registrar.register(topologyProps) + const success = libp2p.registrar.unregister(identifier) expect(success).to.eql(true) }) it('should fail to unregister if no register was made', () => { - const success = registrar.unregister('bad-identifier') + const success = libp2p.registrar.unregister('bad-identifier') expect(success).to.eql(false) }) @@ -85,10 +98,10 @@ describe('registrar', () => { const remotePeerId = conn.remotePeer // Add connected peer with protocol to peerStore and registrar - peerStore.protoBook.add(remotePeerId, [multicodec]) + libp2p.peerStore.protoBook.add(remotePeerId, [multicodec]) - registrar.onConnect(remotePeerId, conn) - expect(registrar.connections.size).to.eql(1) + libp2p.connectionManager.onConnect(conn) + expect(libp2p.connectionManager.size).to.eql(1) const topologyProps = new Topology({ multicodecs: multicodec, @@ -108,14 +121,16 @@ describe('registrar', () => { }) // Register protocol - const identifier = registrar.register(topologyProps) - const topology = registrar.topologies.get(identifier) + const identifier = libp2p.registrar.register(topologyProps) + const topology = libp2p.registrar.topologies.get(identifier) // Topology created expect(topology).to.exist() - registrar.onDisconnect(remotePeerId) - expect(registrar.connections.size).to.eql(0) + await conn.close() + + libp2p.connectionManager.onDisconnect(conn) + expect(libp2p.connectionManager.size).to.eql(0) // Wait for handlers to be called return Promise.all([ @@ -141,68 +156,30 @@ describe('registrar', () => { 
}) // Register protocol - const identifier = registrar.register(topologyProps) - const topology = registrar.topologies.get(identifier) + const identifier = libp2p.registrar.register(topologyProps) + const topology = libp2p.registrar.topologies.get(identifier) // Topology created expect(topology).to.exist() - expect(registrar.connections.size).to.eql(0) + expect(libp2p.connectionManager.size).to.eql(0) // Setup connections before registrar const conn = await createMockConnection() const remotePeerId = conn.remotePeer // Add connected peer to peerStore and registrar - peerStore.protoBook.set(remotePeerId, []) - registrar.onConnect(remotePeerId, conn) + libp2p.peerStore.protoBook.set(remotePeerId, []) + libp2p.connectionManager.onConnect(conn) // Add protocol to peer and update it - peerStore.protoBook.add(remotePeerId, [multicodec]) + libp2p.peerStore.protoBook.add(remotePeerId, [multicodec]) await onConnectDefer.promise // Remove protocol to peer and update it - peerStore.protoBook.set(remotePeerId, []) + libp2p.peerStore.protoBook.set(remotePeerId, []) await onDisconnectDefer.promise }) - - it('should filter connections on disconnect, removing the closed one', async () => { - const onDisconnectDefer = pDefer() - - const topologyProps = new Topology({ - multicodecs: multicodec, - handlers: { - onConnect: () => {}, - onDisconnect: () => { - onDisconnectDefer.resolve() - } - } - }) - - // Register protocol - registrar.register(topologyProps) - - // Setup connections before registrar - const [localPeer, remotePeer] = await peerUtils.createPeerId({ number: 2 }) - - const conn1 = await createMockConnection({ localPeer, remotePeer }) - const conn2 = await createMockConnection({ localPeer, remotePeer }) - - const id = remotePeer.toB58String() - - // Add connection to registrar - registrar.onConnect(remotePeer, conn1) - registrar.onConnect(remotePeer, conn2) - - expect(registrar.connections.get(id).length).to.eql(2) - - conn2._stat.status = 'closed' - registrar.onDisconnect(remotePeer, conn2) - - const peerConnections = registrar.connections.get(id) - expect(peerConnections.length).to.eql(1) - expect(peerConnections[0]._stat.status).to.eql('open') - }) }) }) diff --git a/test/registrar/utils.js b/test/registrar/utils.js deleted file mode 100644 index 727d99b195..0000000000 --- a/test/registrar/utils.js +++ /dev/null @@ -1,51 +0,0 @@ -'use strict' - -const { Connection } = require('libp2p-interfaces/src/connection') -const multiaddr = require('multiaddr') - -const pair = require('it-pair') - -const peerUtils = require('../utils/creators/peer') - -module.exports.createMockConnection = async (properties = {}) => { - const localAddr = multiaddr('/ip4/127.0.0.1/tcp/8080') - const remoteAddr = multiaddr('/ip4/127.0.0.1/tcp/8081') - - const [localPeer, remotePeer] = await peerUtils.createPeerId({ number: 2 }) - const openStreams = [] - let streamId = 0 - - return new Connection({ - localPeer: localPeer, - remotePeer: remotePeer, - localAddr, - remoteAddr, - stat: { - timeline: { - open: Date.now() - 10, - upgraded: Date.now() - }, - direction: 'outbound', - encryption: '/secio/1.0.0', - multiplexer: '/mplex/6.7.0', - status: 'open' - }, - newStream: (protocols) => { - const id = streamId++ - const stream = pair() - - stream.close = () => stream.sink([]) - stream.id = id - - openStreams.push(stream) - - return { - stream, - protocol: protocols[0] - } - }, - close: () => { }, - getStreams: () => openStreams, - ...properties - }) -} diff --git a/test/upgrading/upgrader.spec.js 
b/test/upgrading/upgrader.spec.js index 6579bfe347..484470c3fb 100644 --- a/test/upgrading/upgrader.spec.js +++ b/test/upgrading/upgrader.spec.js @@ -420,24 +420,24 @@ describe('libp2p.upgrader', () => { const { inbound, outbound } = mockMultiaddrConnPair({ addrs, remotePeer }) // Spy on emit for easy verification - sinon.spy(libp2p, 'emit') + sinon.spy(libp2p.connectionManager, 'emit') // Upgrade and check the connect event const connections = await Promise.all([ libp2p.upgrader.upgradeOutbound(outbound), remoteUpgrader.upgradeInbound(inbound) ]) - expect(libp2p.emit.callCount).to.equal(1) + expect(libp2p.connectionManager.emit.callCount).to.equal(1) - let [event, peerId] = libp2p.emit.getCall(0).args + let [event, connection] = libp2p.connectionManager.emit.getCall(0).args expect(event).to.equal('peer:connect') - expect(peerId.isEqual(remotePeer)).to.equal(true) + expect(connection.remotePeer.isEqual(remotePeer)).to.equal(true) // Close and check the disconnect event await Promise.all(connections.map(conn => conn.close())) - expect(libp2p.emit.callCount).to.equal(2) - ;([event, peerId] = libp2p.emit.getCall(1).args) + expect(libp2p.connectionManager.emit.callCount).to.equal(2) + ;([event, connection] = libp2p.connectionManager.emit.getCall(1).args) expect(event).to.equal('peer:disconnect') - expect(peerId.isEqual(remotePeer)).to.equal(true) + expect(connection.remotePeer.isEqual(remotePeer)).to.equal(true) }) }) From e87b42bc7887bf46a03c5b6defbe90c8152f0644 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Mon, 27 Apr 2020 16:11:25 +0200 Subject: [PATCH 095/102] chore: apply suggestions from code review Co-Authored-By: Jacob Heun --- doc/API.md | 4 ++-- src/connection-manager/index.js | 5 ----- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/doc/API.md b/doc/API.md index 50c0efcaef..800807047b 100644 --- a/doc/API.md +++ b/doc/API.md @@ -1168,7 +1168,7 @@ unless they are performing a specific action. See [peer discovery and auto dial] This event will be triggered anytime a new Connection is established to another peer. -`libp2p.on('peer:connect', (connection) => {})` +`libp2p.connectionManager.on('peer:connect', (connection) => {})` - `connection`: instance of [`Connection`][connection] @@ -1176,7 +1176,7 @@ This event will be triggered anytime a new Connection is established to another This event will be triggered anytime we are disconnected from another peer, regardless of the circumstances of that disconnection. If we happen to have multiple connections to a peer, this event will **only** be triggered when the last connection is closed. 
-`libp2p.on('peer:disconnect', (connection) => {})` +`libp2p.connectionManager.on('peer:disconnect', (connection) => {})` - `connection`: instance of [`Connection`][connection] diff --git a/src/connection-manager/index.js b/src/connection-manager/index.js index d6b50324ca..b6aad603b0 100644 --- a/src/connection-manager/index.js +++ b/src/connection-manager/index.js @@ -9,7 +9,6 @@ const retimer = require('retimer') const { EventEmitter } = require('events') const PeerId = require('peer-id') -const { Connection } = require('libp2p-interfaces/src/connection') const { ERR_INVALID_PARAMETERS @@ -172,10 +171,6 @@ class ConnectionManager extends EventEmitter { * @param {Connection} connection */ onConnect (connection) { - if (!Connection.isConnection(connection)) { - throw errcode(new Error('conn must be an instance of interface-connection'), ERR_INVALID_PARAMETERS) - } - const peerId = connection.remotePeer.toB58String() const storedConn = this.connections.get(peerId) From 7e76d0f6499dd6a862032846875574b6c71cf154 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Sat, 18 Apr 2020 23:26:46 +0200 Subject: [PATCH 096/102] feat: address manager --- doc/API.md | 71 +++++++--- doc/CONFIGURATION.md | 2 + package.json | 2 +- src/address-manager/README.md | 49 +++++++ src/address-manager/index.js | 55 ++++++++ src/circuit/index.js | 4 +- src/config.js | 4 +- src/identify/index.js | 21 +-- src/index.js | 68 ++++++++-- src/transport-manager.js | 8 +- test/addresses/address-manager.spec.js | 93 +++++++++++++ test/addresses/addresses.node.js | 152 ++++++++++++++++++++++ test/addresses/utils.js | 16 +++ test/core/listening.node.js | 2 +- test/core/ping.node.js | 4 +- test/dialing/direct.node.js | 11 +- test/dialing/relay.node.js | 15 ++- test/identify/index.spec.js | 102 +++++++-------- test/peer-discovery/index.node.js | 4 +- test/pubsub/implementations.node.js | 2 +- test/pubsub/operation.node.js | 4 +- test/transports/transport-manager.node.js | 10 +- test/transports/transport-manager.spec.js | 10 +- test/utils/creators/peer.js | 5 +- 24 files changed, 593 insertions(+), 121 deletions(-) create mode 100644 src/address-manager/README.md create mode 100644 src/address-manager/index.js create mode 100644 test/addresses/address-manager.spec.js create mode 100644 test/addresses/addresses.node.js create mode 100644 test/addresses/utils.js diff --git a/doc/API.md b/doc/API.md index 800807047b..07e5817eb0 100644 --- a/doc/API.md +++ b/doc/API.md @@ -11,6 +11,9 @@ * [`handle`](#handle) * [`unhandle`](#unhandle) * [`ping`](#ping) + * [`addressManager.listen`](#addressManagerlisten) + * [`addressManager.announce`](#addressManagerannounce) + * [`addressManager.noAnnounce`](#addressManagernoannounce) * [`contentRouting.findProviders`](#contentroutingfindproviders) * [`contentRouting.provide`](#contentroutingprovide) * [`contentRouting.put`](#contentroutingput) @@ -360,31 +363,42 @@ Pings a given peer and get the operation's latency. const latency = await libp2p.ping(otherPeerId) ``` -### peerRouting.findPeer +### addressManager.listen -Iterates over all peer routers in series to find the given peer. If the DHT is enabled, it will be tried first. +Getter for getting the addresses that the peer is using for listening on libp2p transports. 
-`libp2p.peerRouting.findPeer(peerId, options)` +`libp2p.addressManager.listen` -#### Parameters +#### Example -| Name | Type | Description | -|------|------|-------------| -| peerId | [`PeerId`][peer-id] | ID of the peer to find | -| options | `object` | operation options | -| options.timeout | `number` | maximum time the query should run | +```js +// ... +const listenAddresses = libp2p.addressManager.listen +// [ ] +``` -#### Returns +### addressManager.announce -| Type | Description | -|------|-------------| -| `Promise<{ id: PeerId, multiaddrs: Multiaddr[] }>` | Peer data of a known peer | +Getter for getting the addresses that the peer is announcing to other peers in the network. -#### Example +`libp2p.addressManager.announce` ```js // ... -const peer = await libp2p.peerRouting.findPeer(peerId, options) +const announceAddresses = libp2p.addressManager.announce +// [ ] +``` + +### addressManager.noAnnounce + +Getter for getting the addresses that the peer is not announcing in the network. + +`libp2p.addressManager.noAnnounce` + +```js +// ... +const noAnnounceAddresses = libp2p.addressManager.noAnnounce +// [ ] ``` ### contentRouting.findProviders @@ -533,6 +547,33 @@ const key = '/key' const { from, val } = await libp2p.contentRouting.get(key) ``` +### peerRouting.findPeer + +Iterates over all peer routers in series to find the given peer. If the DHT is enabled, it will be tried first. + +`libp2p.peerRouting.findPeer(peerId, options)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| peerId | [`PeerId`][peer-id] | ID of the peer to find | +| options | `object` | operation options | +| options.timeout | `number` | maximum time the query should run | + +#### Returns + +| Type | Description | +|------|-------------| +| `Promise<{ id: PeerId, multiaddrs: Multiaddr[] }>` | Peer data of a known peer | + +#### Example + +```js +// ... +const peer = await libp2p.peerRouting.findPeer(peerId, options) +``` + ### peerStore.addressBook.add Adds known `multiaddrs` of a given peer. If the peer is not known, it will be set with the provided multiaddrs. diff --git a/doc/CONFIGURATION.md b/doc/CONFIGURATION.md index c74a59a6c9..ae6d471ecc 100644 --- a/doc/CONFIGURATION.md +++ b/doc/CONFIGURATION.md @@ -207,6 +207,8 @@ Besides the `modules` and `config`, libp2p allows other internal options and con - `peerInfo`: a previously created instance of [libp2p/js-peer-info](https://github.com/libp2p/js-peer-info). - This is particularly useful if you want to reuse the same `peer-id`, as well as for modules like `libp2p-delegated-content-routing`, which need a `peer-id` in their instantiation. +TODO: Add listen/announce addresses and remove peerInfo!! + ### Examples #### Basic setup diff --git a/package.json b/package.json index 5b5d3bf8a6..045d114deb 100644 --- a/package.json +++ b/package.json @@ -94,7 +94,7 @@ "libp2p-floodsub": "^0.21.0", "libp2p-gossipsub": "^0.4.0", "libp2p-kad-dht": "^0.19.1", - "libp2p-mdns": "^0.14.0", + "libp2p-mdns": "libp2p/js-libp2p-mdns#chore/use-address-manager", "libp2p-mplex": "^0.9.1", "libp2p-secio": "^0.12.1", "libp2p-tcp": "^0.14.1", diff --git a/src/address-manager/README.md b/src/address-manager/README.md new file mode 100644 index 0000000000..6cceff6a16 --- /dev/null +++ b/src/address-manager/README.md @@ -0,0 +1,49 @@ +# Address Manager + +The Address manager is responsible for keeping an updated register of the peer's addresses. 
It includes 3 different types of addresses: `Listen Addresses`, `Announce Addresses` and `No Announce Addresses`.
+
+These addresses should be specified in your libp2p [configuration](../../doc/CONFIGURATION.md) when you create your node.
+
+## Listen Addresses
+
+A libp2p node should have a set of listen addresses, which will be used by the underlying libp2p transports to listen for dials from other nodes in the network.
+
+Before a libp2p node starts, a set of listen addresses should be provided to the AddressManager, so that when the node is started the libp2p transports can use them to listen for connections. Accordingly, listen addresses should be specified through the libp2p configuration, in order to have the `AddressManager` created with them.
+
+It is important to point out that libp2p can listen on addresses that rely on any available local port. In this context, the provided listen addresses might not be exactly the same as the ones used by the transports. For example, the TCP transport may replace `/ip4/0.0.0.0/tcp/0` with something like `/ip4/0.0.0.0/tcp/8989`. As a consequence, libp2p should take this into account when advertising its addresses.
+
+## Announce Addresses
+
+In some scenarios, a libp2p node will need to announce addresses that it is not listening on. In other words, Announce Addresses are an amendment to the Listen Addresses that aims to enable other nodes to achieve connectivity to this node.
+
+Scenarios for Announce Addresses include:
+- when you set up a libp2p node in your private network at home, but you need to announce your public IP Address to the outside world;
+- when you want to announce a DNS address, which maps to your public IP Address.
+
+## No Announce Addresses
+
+While Announce Addresses are added to enable connectivity from other peers, we may also want to avoid announcing addresses that will not be reachable. Accordingly, No Announce Addresses should be specified so that they are not announced by the peer as addresses that other peers can use to dial it.
+
+As stated in the Listen Addresses section, Listen Addresses might get modified after the libp2p transports start using them to listen for new connections. Accordingly, libp2p should also take these changes into account so that they can be matched when No Announce Addresses are filtered out of the advertised addresses.
+
+## Implementation
+
+When a libp2p node is created, the Address Manager will be populated from the addresses provided through the libp2p configuration. Once the node is started, the Transport Manager component will gather the listen addresses from the Address Manager, so that the libp2p transports can use them to listen on.
+
+Libp2p will use the Address Manager as the source of truth when advertising the peer's addresses. After all transports are ready, other libp2p components/subsystems will kick off, namely the Identify Service and the DHT. Both of them will announce the node addresses to the other peers in the network. The announce and noAnnounce addresses have an important role here and are gathered by libp2p to compute its current addresses to advertise every time it is needed.
+
+## Future Considerations
+
+### Dynamic address modifications
+
+In a future iteration, we can enable these addresses to be modified at runtime. For this, the Address Manager should be responsible for notifying interested subsystems of these changes through an Event Emitter.
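+
+For illustration only, a minimal sketch of what such a notification could look like. The `change:addresses` event name and the subscribing code below are hypothetical assumptions, not part of the current implementation:
+
+```js
+const { EventEmitter } = require('events')
+
+// Hypothetical: an AddressManager that extends EventEmitter could emit
+// an event whenever its listen addresses are modified at runtime
+const addressManager = new EventEmitter()
+addressManager.listen = new Set(['/ip4/127.0.0.1/tcp/0'])
+
+addressManager.on('change:addresses', (listenAddrs) => {
+  // e.g. the Transport Manager could start listening on the new addresses here
+  console.log('listen addresses changed', listenAddrs)
+})
+
+// Adding a new listen address at runtime would emit the event
+addressManager.listen.add('/ip4/127.0.0.1/tcp/8000/ws')
+addressManager.emit('change:addresses', Array.from(addressManager.listen))
+```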
+
+#### Modify Listen Addresses
+
+While adding new listen addresses at runtime is a feasible operation, removing a listen address might have bad implications for the node, since all the connections using that listen address will be closed. With this in mind, and also considering the lack of good use cases for removing listen addresses, the Address Manager API only allows libp2p users to add new Listen Addresses at runtime.
+
+Every time a new listen address is added, the Address Manager should emit an event with the new multiaddrs to listen on. The Transport Manager should listen to these events and act accordingly.
+
+#### Modify Announce Addresses
+
+When the announce addresses are modified, the Address Manager should emit an event so that other subsystems can act accordingly. For example, the libp2p identify service should use the identify push protocol to inform other peers about these changes.
diff --git a/src/address-manager/index.js b/src/address-manager/index.js
new file mode 100644
index 0000000000..972f6fb2bf
--- /dev/null
+++ b/src/address-manager/index.js
@@ -0,0 +1,55 @@
+'use strict'
+
+const debug = require('debug')
+const log = debug('libp2p:addresses')
+log.error = debug('libp2p:addresses:error')
+
+const multiaddr = require('multiaddr')
+
+/**
+ * Responsible for managing the peer addresses.
+ * Peers can specify their listen, announce and noAnnounce addresses.
+ * The listen addresses will be used by the libp2p transports to listen for new connections,
+ * while the announce and noAnnounce addresses will be combined with the listen addresses for
+ * address advertising to other peers in the network.
+ */
+class AddressManager {
+  /**
+   * @constructor
+   * @param {object} [options]
+   * @param {Array} [options.listen = []] list of multiaddr string representations to listen on.
+   * @param {Array} [options.announce = []] list of multiaddr string representations to announce.
+   * @param {Array} [options.noAnnounce = []] list of multiaddr string representations to not announce.
+   */
+  constructor ({ listen = [], announce = [], noAnnounce = [] } = {}) {
+    this.listen = new Set(listen)
+    this.announce = new Set(announce)
+    this.noAnnounce = new Set(noAnnounce)
+  }
+
+  /**
+   * Get peer listen multiaddrs.
+   * @return {Array}
+   */
+  getListenMultiaddrs () {
+    return Array.from(this.listen).map((a) => multiaddr(a))
+  }
+
+  /**
+   * Get peer announce multiaddrs.
+   * @return {Array}
+   */
+  getAnnounceMultiaddrs () {
+    return Array.from(this.announce).map((a) => multiaddr(a))
+  }
+
+  /**
+   * Get peer noAnnounce multiaddrs.
+ * @return {Array} + */ + getNoAnnounceMultiaddrs () { + return Array.from(this.noAnnounce).map((a) => multiaddr(a)) + } +} + +module.exports = AddressManager diff --git a/src/circuit/index.js b/src/circuit/index.js index c833f124ea..637f5f86f1 100644 --- a/src/circuit/index.js +++ b/src/circuit/index.js @@ -32,7 +32,7 @@ class Circuit { this._connectionManager = libp2p.connectionManager this._upgrader = upgrader this._options = libp2p._config.relay - this.addresses = libp2p.addresses + this.addressManager = libp2p.addressManager this.peerId = libp2p.peerId this._registrar.handle(multicodec, this._onProtocol.bind(this)) } @@ -122,7 +122,7 @@ class Circuit { type: CircuitPB.Type.HOP, srcPeer: { id: this.peerId.toBytes(), - addrs: this.addresses.listen.map(addr => addr.buffer) + addrs: this.addressManager.getListenMultiaddrs().map(addr => addr.buffer) }, dstPeer: { id: destinationPeer.toBytes(), diff --git a/src/config.js b/src/config.js index 618c35d9e2..cb6aa6f551 100644 --- a/src/config.js +++ b/src/config.js @@ -5,7 +5,9 @@ const Constants = require('./constants') const DefaultConfig = { addresses: { - listen: [] + listen: [], + announce: [], + noAnnounce: [] }, connectionManager: { minPeers: 25 diff --git a/src/identify/index.js b/src/identify/index.js index d8ea46e857..604f10ed5b 100644 --- a/src/identify/index.js +++ b/src/identify/index.js @@ -45,22 +45,20 @@ class IdentifyService { /** * @constructor * @param {object} options - * @param {PeerStore} options.peerStore - * @param {ConnectionManager} options.connectionManager + * @param {Libp2p} options.libp2p * @param {Map} options.protocols A reference to the protocols we support - * @param {PeerId} options.peerId The peer running the identify service - * @param {{ listen: Array}} options.addresses The peer addresses */ constructor (options) { /** * @property {PeerStore} */ - this.peerStore = options.peerStore + this.peerStore = options.libp2p.peerStore /** * @property {ConnectionManager} */ - this.connectionManager = options.connectionManager + this.connectionManager = options.libp2p.connectionManager + this.connectionManager.on('peer:connect', (connection) => { const peerId = connection.remotePeer @@ -70,9 +68,12 @@ class IdentifyService { /** * @property {PeerId} */ - this.peerId = options.peerId + this.peerId = options.libp2p.peerId - this.addresses = options.addresses || {} + /** + * @property {AddressManager} + */ + this._libp2p = options.libp2p this._protocols = options.protocols @@ -91,7 +92,7 @@ class IdentifyService { await pipe( [{ - listenAddrs: this.addresses.listen.map((ma) => ma.buffer), + listenAddrs: this._libp2p.getAdvertisingMultiaddrs().map((ma) => ma.buffer), protocols: Array.from(this._protocols.keys()) }], pb.encode(Message), @@ -216,7 +217,7 @@ class IdentifyService { protocolVersion: PROTOCOL_VERSION, agentVersion: AGENT_VERSION, publicKey, - listenAddrs: this.addresses.listen.map((ma) => ma.buffer), + listenAddrs: this._libp2p.getAdvertisingMultiaddrs().map((ma) => ma.buffer), observedAddr: connection.remoteAddr.buffer, protocols: Array.from(this._protocols.keys()) }) diff --git a/src/index.js b/src/index.js index b32c2f7d0c..40b4f43a78 100644 --- a/src/index.js +++ b/src/index.js @@ -14,6 +14,7 @@ const getPeer = require('./get-peer') const { validate: validateConfig } = require('./config') const { codes } = require('./errors') +const AddressManager = require('./address-manager') const ConnectionManager = require('./connection-manager') const Circuit = require('./circuit') const Dialer = require('./dialer') 
@@ -47,6 +48,7 @@ class Libp2p extends EventEmitter { // Addresses {listen, announce, noAnnounce} this.addresses = this._options.addresses + this.addressManager = new AddressManager(this._options.addresses) this._modules = this._options.modules this._config = this._options.config @@ -122,10 +124,7 @@ class Libp2p extends EventEmitter { // Add the identify service since we can multiplex this.identifyService = new IdentifyService({ - peerStore: this.peerStore, - connectionManager: this.connectionManager, - peerId: this.peerId, - addresses: this.addresses, + libp2p: this, protocols: this.upgrader.protocols }) this.handle(Object.values(IDENTIFY_PROTOCOLS), this.identifyService.handleMessage) @@ -189,6 +188,8 @@ class Libp2p extends EventEmitter { */ async start () { log('libp2p is starting') + // TODO: consider validate listen addresses on start? + // depend on transports? try { await this._onStarting() await this._onDidStart() @@ -292,6 +293,54 @@ class Libp2p extends EventEmitter { return connection } + /** + * Get peer advertising multiaddrs by concating the addresses used + * by transports to listen with the announce addresses. + * Duplicated addresses and noAnnounce addresses are filtered out. + * This takes into account random ports on matching noAnnounce addresses. + * @return {Array} + */ + getAdvertisingMultiaddrs () { + // Filter noAnnounce multiaddrs + const filterMa = this.addressManager.getNoAnnounceMultiaddrs() + + // Special filter for noAnnounce addresses using a random port + // eg /ip4/0.0.0.0/tcp/0 => /ip4/192.168.1.0/tcp/58751 + const filterSpecial = filterMa + .map((ma) => ({ + protos: ma.protos(), + ...ma.toOptions() + })) + .filter((op) => op.port === 0) + + // Create advertising list + return this.transportManager.getAddrs() + .concat(this.addressManager.getAnnounceMultiaddrs()) + .filter((ma, index, array) => { + // Filter out if repeated + if (array.findIndex((otherMa) => otherMa.equals(ma)) !== index) { + return false + } + + // Filter out if in noAnnounceMultiaddrs + if (filterMa.find((fm) => fm.equals(ma))) { + return false + } + + // Filter out if in the special filter + const options = ma.toOptions() + if (filterSpecial.find((op) => + op.family === options.family && + op.host === options.host && + op.transport === options.transport && + op.protos.length === ma.protos().length + )) { + return false + } + return true + }) + } + /** * Disconnects all connections to the given `peer` * @param {PeerId|multiaddr|string} peer the peer to close connections to @@ -353,14 +402,8 @@ class Libp2p extends EventEmitter { } async _onStarting () { - // Listen on the addresses provided - const multiaddrs = this.addresses.listen - - await this.transportManager.listen(multiaddrs) - - // The addresses may change once the listener starts - // eg /ip4/0.0.0.0/tcp/0 => /ip4/192.168.1.0/tcp/58751 - this.addresses.listen = this.transportManager.getAddrs() + // Listen on the provided transports + await this.transportManager.listen() if (this._config.pubsub.enabled) { this.pubsub && this.pubsub.start() @@ -466,7 +509,6 @@ class Libp2p extends EventEmitter { if (typeof DiscoveryService === 'function') { discoveryService = new DiscoveryService(Object.assign({}, config, { peerId: this.peerId, - multiaddrs: this.addresses.listen, libp2p: this })) } else { diff --git a/src/transport-manager.js b/src/transport-manager.js index bcbaa45e1f..d0aae70538 100644 --- a/src/transport-manager.js +++ b/src/transport-manager.js @@ -127,11 +127,13 @@ class TransportManager { } /** - * Starts listeners for 
each given Multiaddr. + * Starts listeners for each listen Multiaddr. + * Update listen multiaddrs of the Address Manager after the operation. * @async - * @param {Multiaddr[]} addrs */ - async listen (addrs) { + async listen () { + const addrs = this.libp2p.addressManager.getListenMultiaddrs() + if (addrs.length === 0) { log('no addresses were provided for listening, this node is dial only') return diff --git a/test/addresses/address-manager.spec.js b/test/addresses/address-manager.spec.js new file mode 100644 index 0000000000..4ac35f79db --- /dev/null +++ b/test/addresses/address-manager.spec.js @@ -0,0 +1,93 @@ +'use strict' +/* eslint-env mocha */ + +const chai = require('chai') +chai.use(require('dirty-chai')) +chai.use(require('chai-as-promised')) +const { expect } = chai + +const multiaddr = require('multiaddr') + +const AddressManager = require('../../src/address-manager') +const peerUtils = require('../utils/creators/peer') + +const listenAddresses = ['/ip4/127.0.0.1/tcp/15006/ws', '/ip4/127.0.0.1/tcp/15008/ws'] +const announceAddreses = ['/dns4/peer.io'] + +describe('Address Manager', () => { + it('should not need any addresses', () => { + const am = new AddressManager() + + expect(am.listen.size).to.equal(0) + expect(am.announce.size).to.equal(0) + expect(am.noAnnounce.size).to.equal(0) + }) + + it('should return listen multiaddrs on get', () => { + const am = new AddressManager({ + listen: listenAddresses + }) + + expect(am.listen.size).to.equal(listenAddresses.length) + expect(am.announce.size).to.equal(0) + expect(am.noAnnounce.size).to.equal(0) + + const listenMultiaddrs = am.getListenMultiaddrs() + expect(listenMultiaddrs.length).to.equal(2) + expect(listenMultiaddrs[0].equals(multiaddr(listenAddresses[0]))).to.equal(true) + expect(listenMultiaddrs[1].equals(multiaddr(listenAddresses[1]))).to.equal(true) + }) + + it('should return announce multiaddrs on get', () => { + const am = new AddressManager({ + listen: listenAddresses, + announce: announceAddreses + }) + + expect(am.listen.size).to.equal(listenAddresses.length) + expect(am.announce.size).to.equal(announceAddreses.length) + expect(am.noAnnounce.size).to.equal(0) + + const announceMultiaddrs = am.getAnnounceMultiaddrs() + expect(announceMultiaddrs.length).to.equal(1) + expect(announceMultiaddrs[0].equals(multiaddr(announceAddreses[0]))).to.equal(true) + }) + + it('should return noAnnounce multiaddrs on get', () => { + const am = new AddressManager({ + listen: listenAddresses, + noAnnounce: listenAddresses + }) + + expect(am.listen.size).to.equal(listenAddresses.length) + expect(am.announce.size).to.equal(0) + expect(am.noAnnounce.size).to.equal(listenAddresses.length) + + const noAnnounceMultiaddrs = am.getNoAnnounceMultiaddrs() + expect(noAnnounceMultiaddrs.length).to.equal(2) + expect(noAnnounceMultiaddrs[0].equals(multiaddr(listenAddresses[0]))).to.equal(true) + expect(noAnnounceMultiaddrs[1].equals(multiaddr(listenAddresses[1]))).to.equal(true) + }) +}) + +describe('libp2p.addressManager', () => { + let libp2p + afterEach(() => libp2p && libp2p.stop()) + + it('should populate the AddressManager from the config', async () => { + [libp2p] = await peerUtils.createPeer({ + started: false, + config: { + addresses: { + listen: listenAddresses, + announce: announceAddreses, + noAnnounce: listenAddresses + } + } + }) + + expect(libp2p.addressManager.listen.size).to.equal(listenAddresses.length) + expect(libp2p.addressManager.announce.size).to.equal(announceAddreses.length) + 
expect(libp2p.addressManager.noAnnounce.size).to.equal(listenAddresses.length) + }) +}) diff --git a/test/addresses/addresses.node.js b/test/addresses/addresses.node.js new file mode 100644 index 0000000000..0526d38804 --- /dev/null +++ b/test/addresses/addresses.node.js @@ -0,0 +1,152 @@ +'use strict' +/* eslint-env mocha */ + +const chai = require('chai') +chai.use(require('dirty-chai')) +chai.use(require('chai-as-promised')) +const { expect } = chai +const sinon = require('sinon') + +const { AddressesOptions } = require('./utils') +const peerUtils = require('../utils/creators/peer') + +const listenAddresses = ['/ip4/127.0.0.1/tcp/0', '/ip4/127.0.0.1/tcp/8000/ws'] +const announceAddreses = ['/dns4/peer.io'] + +describe('libp2p.getAdvertisingMultiaddrs', () => { + let libp2p + + afterEach(() => libp2p && libp2p.stop()) + + it('should keep listen addresses after start, even if changed', async () => { + [libp2p] = await peerUtils.createPeer({ + started: false, + config: { + ...AddressesOptions, + addresses: { + listen: listenAddresses, + announce: announceAddreses + } + } + }) + + let listenAddrs = libp2p.addressManager.listen + expect(listenAddrs.size).to.equal(listenAddresses.length) + expect(listenAddrs.has(listenAddresses[0])).to.equal(true) + expect(listenAddrs.has(listenAddresses[1])).to.equal(true) + + // Should not replace listen addresses after transport listen + // Only transportManager has visibility of the port used + await libp2p.start() + + listenAddrs = libp2p.addressManager.listen + expect(listenAddrs.size).to.equal(listenAddresses.length) + expect(listenAddrs.has(listenAddresses[0])).to.equal(true) + expect(listenAddrs.has(listenAddresses[1])).to.equal(true) + }) + + it('should advertise all addresses if noAnnounce addresses are not provided, but with correct ports', async () => { + [libp2p] = await peerUtils.createPeer({ + config: { + ...AddressesOptions, + addresses: { + listen: listenAddresses, + announce: announceAddreses + } + } + }) + + const tmListen = libp2p.transportManager.getAddrs().map((ma) => ma.toString()) + + const spyAnnounce = sinon.spy(libp2p.addressManager, 'getAnnounceMultiaddrs') + const spyNoAnnounce = sinon.spy(libp2p.addressManager, 'getNoAnnounceMultiaddrs') + const spyListen = sinon.spy(libp2p.addressManager, 'getListenMultiaddrs') + const spyTranspMgr = sinon.spy(libp2p.transportManager, 'getAddrs') + + const advertiseMultiaddrs = libp2p.getAdvertisingMultiaddrs().map((ma) => ma.toString()) + + expect(spyAnnounce).to.have.property('callCount', 1) + expect(spyNoAnnounce).to.have.property('callCount', 1) + expect(spyListen).to.have.property('callCount', 0) // Listen addr should not be used + expect(spyTranspMgr).to.have.property('callCount', 1) + + // Announce 2 listen (transport) + 1 announce + expect(advertiseMultiaddrs.length).to.equal(3) + tmListen.forEach((m) => { + expect(advertiseMultiaddrs).to.include(m) + }) + announceAddreses.forEach((m) => { + expect(advertiseMultiaddrs).to.include(m) + }) + expect(advertiseMultiaddrs).to.not.include(listenAddresses[0]) // Random Port switch + }) + + it('should remove replicated addresses', async () => { + [libp2p] = await peerUtils.createPeer({ + config: { + ...AddressesOptions, + addresses: { + listen: listenAddresses, + announce: [listenAddresses[1]] + } + } + }) + + const advertiseMultiaddrs = libp2p.getAdvertisingMultiaddrs().map((ma) => ma.toString()) + + // Announce 2 listen (transport), ignoring duplicated in announce + expect(advertiseMultiaddrs.length).to.equal(2) + }) + + it('should not 
advertise noAnnounce addresses', async () => { + const noAnnounce = [listenAddresses[1]] + ;[libp2p] = await peerUtils.createPeer({ + config: { + ...AddressesOptions, + addresses: { + listen: listenAddresses, + announce: announceAddreses, + noAnnounce + } + } + }) + + const advertiseMultiaddrs = libp2p.getAdvertisingMultiaddrs().map((ma) => ma.toString()) + + // Announce 1 listen (transport) not in the noAnnounce and the announce + expect(advertiseMultiaddrs.length).to.equal(2) + + announceAddreses.forEach((m) => { + expect(advertiseMultiaddrs).to.include(m) + }) + noAnnounce.forEach((m) => { + expect(advertiseMultiaddrs).to.not.include(m) + }) + }) + + it('should not advertise noAnnounce addresses with random port switch', async () => { + const noAnnounce = [listenAddresses[0]] + ;[libp2p] = await peerUtils.createPeer({ + config: { + ...AddressesOptions, + addresses: { + listen: listenAddresses, + announce: announceAddreses, + noAnnounce + } + } + }) + + const advertiseMultiaddrs = libp2p.getAdvertisingMultiaddrs().map((ma) => ma.toString()) + + // Announce 1 listen (transport) not in the noAnnounce and the announce + expect(advertiseMultiaddrs.length).to.equal(2) + + announceAddreses.forEach((m) => { + expect(advertiseMultiaddrs).to.include(m) + }) + noAnnounce.forEach((m) => { + expect(advertiseMultiaddrs).to.not.include(m) + }) + }) +}) diff --git a/test/addresses/utils.js b/test/addresses/utils.js new file mode 100644 index 0000000000..08295c7bb8 --- /dev/null +++ b/test/addresses/utils.js @@ -0,0 +1,16 @@ +'use strict' + +const Transport1 = require('libp2p-tcp') +const Transport2 = require('libp2p-websockets') +const mergeOptions = require('merge-options') +const baseOptions = require('../utils/base-options') + +module.exports.baseOptions = baseOptions + +const AddressesOptions = mergeOptions(baseOptions, { + modules: { + transport: [Transport1, Transport2] + } +}) + +module.exports.AddressesOptions = AddressesOptions diff --git a/test/core/listening.node.js b/test/core/listening.node.js index b54f481403..de4bf47bdc 100644 --- a/test/core/listening.node.js +++ b/test/core/listening.node.js @@ -38,7 +38,7 @@ describe('Listening', () => { await libp2p.start() - const addrs = libp2p.addresses.listen + const addrs = libp2p.transportManager.getAddrs() // Should get something like: // /ip4/127.0.0.1/tcp/50866 diff --git a/test/core/ping.node.js b/test/core/ping.node.js index e5546a0a52..99e57d6095 100644 --- a/test/core/ping.node.js +++ b/test/core/ping.node.js @@ -21,8 +21,8 @@ describe('ping', () => { config: baseOptions }) - nodes[0].peerStore.addressBook.set(nodes[1].peerId, nodes[1].addresses.listen) - nodes[1].peerStore.addressBook.set(nodes[0].peerId, nodes[0].addresses.listen) + nodes[0].peerStore.addressBook.set(nodes[1].peerId, nodes[1].getAdvertisingMultiaddrs()) + nodes[1].peerStore.addressBook.set(nodes[0].peerId, nodes[0].getAdvertisingMultiaddrs()) }) it('ping once from peer0 to peer1', async () => { diff --git a/test/dialing/direct.node.js b/test/dialing/direct.node.js index e3bd7d7be5..9ed3b5d0ff 100644 --- a/test/dialing/direct.node.js +++ b/test/dialing/direct.node.js @@ -21,6 +21,7 @@ const { AbortError } = require('libp2p-interfaces/src/transport/errors') const Libp2p = require('../../src') const Dialer = require('../../src/dialer') +const AddressManager = require('../../src/address-manager') const PeerStore = require('../../src/peer-store') const TransportManager = require('../../src/transport-manager') const { codes: ErrorCodes } = require('../../src/errors') @@ 
-46,7 +47,9 @@ describe('Dialing (direct, TCP)', () => { PeerId.createFromJSON(Peers[0]) ]) remoteTM = new TransportManager({ - libp2p: {}, + libp2p: { + addressManager: new AddressManager({ listen: [listenAddr] }) + }, upgrader: mockUpgrader }) remoteTM.add(Transport.prototype[Symbol.toStringTag], Transport) @@ -278,7 +281,7 @@ describe('Dialing (direct, TCP)', () => { }) sinon.spy(libp2p.dialer, 'connectToPeer') - libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.addresses.listen) + libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.getAdvertisingMultiaddrs()) const connection = await libp2p.dial(remotePeerId) expect(connection).to.exist() @@ -360,7 +363,7 @@ describe('Dialing (direct, TCP)', () => { const fullAddress = remoteAddr.encapsulate(`/p2p/${remoteLibp2p.peerId.toB58String()}`) - libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.addresses.listen) + libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.getAdvertisingMultiaddrs()) const dialResults = await Promise.all([...new Array(dials)].map((_, index) => { if (index % 2 === 0) return libp2p.dial(remoteLibp2p.peerId) return libp2p.dial(fullAddress) @@ -390,7 +393,7 @@ describe('Dialing (direct, TCP)', () => { const error = new Error('Boom') sinon.stub(libp2p.transportManager, 'dial').callsFake(() => Promise.reject(error)) - libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.addresses.listen) + libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.getAdvertisingMultiaddrs()) const dialResults = await pSettle([...new Array(dials)].map((_, index) => { if (index % 2 === 0) return libp2p.dial(remoteLibp2p.peerId) return libp2p.dial(remoteAddr) diff --git a/test/dialing/relay.node.js b/test/dialing/relay.node.js index 4b706225ec..137a203737 100644 --- a/test/dialing/relay.node.js +++ b/test/dialing/relay.node.js @@ -5,6 +5,7 @@ const chai = require('chai') chai.use(require('dirty-chai')) chai.use(require('chai-as-promised')) const { expect } = chai +const sinon = require('sinon') const multiaddr = require('multiaddr') const { collect } = require('streaming-iterables') @@ -24,7 +25,7 @@ describe('Dialing (via relay, TCP)', () => { let relayLibp2p let dstLibp2p - before(async () => { + beforeEach(async () => { const peerIds = await createPeerId({ number: 3 }) // Create 3 nodes, and turn HOP on for the relay ;[srcLibp2p, relayLibp2p, dstLibp2p] = peerIds.map((peerId, index) => { @@ -68,7 +69,9 @@ describe('Dialing (via relay, TCP)', () => { .encapsulate(`/p2p-circuit/p2p/${dstLibp2p.peerId.toB58String()}`) const tcpAddrs = dstLibp2p.transportManager.getAddrs() - await dstLibp2p.transportManager.listen([multiaddr(`/p2p-circuit${relayAddr}/p2p/${relayIdString}`)]) + sinon.stub(dstLibp2p.addressManager, 'listen').value([multiaddr(`/p2p-circuit${relayAddr}/p2p/${relayIdString}`)]) + + await dstLibp2p.transportManager.listen() expect(dstLibp2p.transportManager.getAddrs()).to.have.deep.members([...tcpAddrs, dialAddr.decapsulate('p2p')]) const connection = await srcLibp2p.dial(dialAddr) @@ -151,13 +154,15 @@ describe('Dialing (via relay, TCP)', () => { // Connect the destination peer and the relay const tcpAddrs = dstLibp2p.transportManager.getAddrs() - await dstLibp2p.transportManager.listen([multiaddr(`${relayAddr}/p2p-circuit`)]) + sinon.stub(dstLibp2p.addressManager, 'getListenMultiaddrs').returns([multiaddr(`${relayAddr}/p2p-circuit`)]) + + await dstLibp2p.transportManager.listen() expect(dstLibp2p.transportManager.getAddrs()).to.have.deep.members([...tcpAddrs, dialAddr.decapsulate('p2p')]) 
// Tamper with the our multiaddrs for the circuit message - srcLibp2p.addresses.listen = [{ + sinon.stub(srcLibp2p.addressManager, 'getListenMultiaddrs').returns([{ buffer: Buffer.from('an invalid multiaddr') - }] + }]) await expect(srcLibp2p.dial(dialAddr)) .to.eventually.be.rejectedWith(AggregateError) diff --git a/test/identify/index.spec.js b/test/identify/index.spec.js index c10bda7b87..b0db66756b 100644 --- a/test/identify/index.spec.js +++ b/test/identify/index.spec.js @@ -44,28 +44,28 @@ describe('Identify', () => { it('should be able to identify another peer', async () => { const localIdentify = new IdentifyService({ - peerId: localPeer, - addresses: { - listen: [] - }, - protocols, - connectionManager: new EventEmitter(), - peerStore: { - addressBook: { - set: () => { } + libp2p: { + peerId: localPeer, + connectionManager: new EventEmitter(), + peerStore: { + addressBook: { + set: () => { } + }, + protoBook: { + set: () => { } + } }, - protoBook: { - set: () => { } - } - } + getAdvertisingMultiaddrs: () => [] + }, + protocols }) const remoteIdentify = new IdentifyService({ - peerId: remotePeer, - addresses: { - listen: [] + libp2p: { + peerId: remotePeer, + connectionManager: new EventEmitter(), + getAdvertisingMultiaddrs: () => [] }, - protocols, - connectionManager: new EventEmitter() + protocols }) const observedAddr = multiaddr('/ip4/127.0.0.1/tcp/1234') @@ -97,28 +97,28 @@ describe('Identify', () => { it('should throw if identified peer is the wrong peer', async () => { const localIdentify = new IdentifyService({ - peerId: localPeer, - addresses: { - listen: [] - }, - protocols, - connectionManager: new EventEmitter(), - peerStore: { - addressBook: { - set: () => { } + libp2p: { + peerId: localPeer, + connectionManager: new EventEmitter(), + peerStore: { + addressBook: { + set: () => { } + }, + protoBook: { + set: () => { } + } }, - protoBook: { - set: () => { } - } - } + getAdvertisingMultiaddrs: () => [] + }, + protocols }) const remoteIdentify = new IdentifyService({ - peerId: remotePeer, - addresses: { - listen: [] + libp2p: { + peerId: remotePeer, + connectionManager: new EventEmitter(), + getAdvertisingMultiaddrs: () => [] }, - protocols, - connectionManager: new EventEmitter() + protocols }) const observedAddr = multiaddr('/ip4/127.0.0.1/tcp/1234') @@ -150,11 +150,11 @@ describe('Identify', () => { connectionManager.getConnection = () => {} const localIdentify = new IdentifyService({ - peerId: localPeer, - addresses: { - listen: [listeningAddr] + libp2p: { + peerId: localPeer, + connectionManager: new EventEmitter(), + getAdvertisingMultiaddrs: () => [listeningAddr] }, - connectionManager, protocols: new Map([ [multicodecs.IDENTIFY], [multicodecs.IDENTIFY_PUSH], @@ -162,18 +162,18 @@ describe('Identify', () => { ]) }) const remoteIdentify = new IdentifyService({ - peerId: remotePeer, - addresses: { - listen: [] - }, - connectionManager, - peerStore: { - addressBook: { - set: () => { } + libp2p: { + peerId: remotePeer, + connectionManager, + peerStore: { + addressBook: { + set: () => { } + }, + protoBook: { + set: () => { } + } }, - protoBook: { - set: () => { } - } + getAdvertisingMultiaddrs: () => [] } }) diff --git a/test/peer-discovery/index.node.js b/test/peer-discovery/index.node.js index 1784c63d32..732afec6e1 100644 --- a/test/peer-discovery/index.node.js +++ b/test/peer-discovery/index.node.js @@ -177,8 +177,8 @@ describe('peer discovery scenarios', () => { remoteLibp2p2.start() ]) - libp2p.peerStore.addressBook.set(remotePeerId1, 
remoteLibp2p1.addresses.listen) - remoteLibp2p2.peerStore.addressBook.set(remotePeerId1, remoteLibp2p1.addresses.listen) + libp2p.peerStore.addressBook.set(remotePeerId1, remoteLibp2p1.getAdvertisingMultiaddrs()) + remoteLibp2p2.peerStore.addressBook.set(remotePeerId1, remoteLibp2p1.getAdvertisingMultiaddrs()) // Topology: // A -> B diff --git a/test/pubsub/implementations.node.js b/test/pubsub/implementations.node.js index e5ee043e58..49310358f0 100644 --- a/test/pubsub/implementations.node.js +++ b/test/pubsub/implementations.node.js @@ -75,7 +75,7 @@ describe('Pubsub subsystem is able to use different implementations', () => { ]) const libp2pId = libp2p.peerId.toB58String() - libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.addresses.listen) + libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.getAdvertisingMultiaddrs()) const connection = await libp2p.dialProtocol(remotePeerId, multicodec) expect(connection).to.exist() diff --git a/test/pubsub/operation.node.js b/test/pubsub/operation.node.js index f92c191536..02cf24ea3a 100644 --- a/test/pubsub/operation.node.js +++ b/test/pubsub/operation.node.js @@ -47,7 +47,7 @@ describe('Pubsub subsystem operates correctly', () => { remoteLibp2p.start() ]) - libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.addresses.listen) + libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.getAdvertisingMultiaddrs()) }) afterEach(() => Promise.all([ @@ -124,7 +124,7 @@ describe('Pubsub subsystem operates correctly', () => { await libp2p.start() await remoteLibp2p.start() - libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.addresses.listen) + libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.getAdvertisingMultiaddrs()) }) afterEach(() => Promise.all([ diff --git a/test/transports/transport-manager.node.js b/test/transports/transport-manager.node.js index 4fd57e9cd1..1036230acb 100644 --- a/test/transports/transport-manager.node.js +++ b/test/transports/transport-manager.node.js @@ -4,6 +4,8 @@ const chai = require('chai') chai.use(require('dirty-chai')) const { expect } = chai + +const AddressManager = require('../../src/address-manager') const TransportManager = require('../../src/transport-manager') const Transport = require('libp2p-tcp') const multiaddr = require('multiaddr') @@ -18,7 +20,9 @@ describe('Transport Manager (TCP)', () => { before(() => { tm = new TransportManager({ - libp2p: {}, + libp2p: { + addressManager: new AddressManager({ listen: addrs }) + }, upgrader: mockUpgrader, onConnection: () => {} }) @@ -37,7 +41,7 @@ describe('Transport Manager (TCP)', () => { it('should be able to listen', async () => { tm.add(Transport.prototype[Symbol.toStringTag], Transport) - await tm.listen(addrs) + await tm.listen() expect(tm._listeners).to.have.key(Transport.prototype[Symbol.toStringTag]) expect(tm._listeners.get(Transport.prototype[Symbol.toStringTag])).to.have.length(addrs.length) // Ephemeral ip addresses may result in multiple listeners @@ -48,7 +52,7 @@ describe('Transport Manager (TCP)', () => { it('should be able to dial', async () => { tm.add(Transport.prototype[Symbol.toStringTag], Transport) - await tm.listen(addrs) + await tm.listen() const addr = tm.getAddrs().shift() const connection = await tm.dial(addr) expect(connection).to.exist() diff --git a/test/transports/transport-manager.spec.js b/test/transports/transport-manager.spec.js index d165923f49..4211bd06d3 100644 --- a/test/transports/transport-manager.spec.js +++ b/test/transports/transport-manager.spec.js @@ -9,6 +9,7 @@ const 
sinon = require('sinon') const multiaddr = require('multiaddr') const Transport = require('libp2p-websockets') +const AddressManager = require('../../src/address-manager') const TransportManager = require('../../src/transport-manager') const mockUpgrader = require('../utils/mockUpgrader') const { MULTIADDRS_WEBSOCKETS } = require('../fixtures/browser') @@ -17,12 +18,16 @@ const Libp2p = require('../../src') const Peers = require('../fixtures/peers') const PeerId = require('peer-id') +const listenAddr = multiaddr('/ip4/127.0.0.1/tcp/0') + describe('Transport Manager (WebSockets)', () => { let tm before(() => { tm = new TransportManager({ - libp2p: {}, + libp2p: { + addressManager: new AddressManager({ listen: [listenAddr] }) + }, upgrader: mockUpgrader, onConnection: () => {} }) @@ -78,9 +83,8 @@ describe('Transport Manager (WebSockets)', () => { it('should fail to listen with no valid address', async () => { tm.add(Transport.prototype[Symbol.toStringTag], Transport) - const addrs = [multiaddr('/ip4/127.0.0.1/tcp/0')] - await expect(tm.listen(addrs)) + await expect(tm.listen()) .to.eventually.be.rejected() .and.to.have.property('code', ErrorCodes.ERR_NO_VALID_ADDRESSES) }) diff --git a/test/utils/creators/peer.js b/test/utils/creators/peer.js index d85c4e8d38..db0026f48d 100644 --- a/test/utils/creators/peer.js +++ b/test/utils/creators/peer.js @@ -21,13 +21,14 @@ const listenAddr = multiaddr('/ip4/127.0.0.1/tcp/0') * @param {boolean} [properties.populateAddressBooks] nodes addressBooks should be populated with other peers (default: true) * @return {Promise>} */ -async function createPeer ({ number = 1, fixture = true, started = true, populateAddressBooks = true, config = defaultOptions } = {}) { +async function createPeer ({ number = 1, fixture = true, started = true, populateAddressBooks = true, config = {} } = {}) { const peerIds = await createPeerId({ number, fixture }) const addresses = started ? 
{ listen: [listenAddr] } : {} const peers = await pTimes(number, (i) => Libp2p.create({ peerId: peerIds[i], addresses, + ...defaultOptions, ...config })) @@ -44,7 +45,7 @@ function _populateAddressBooks (peers) { for (let i = 0; i < peers.length; i++) { for (let j = 0; j < peers.length; j++) { if (i !== j) { - peers[i].peerStore.addressBook.set(peers[j].peerId, peers[j].addresses.listen) + peers[i].peerStore.addressBook.set(peers[j].peerId, peers[j].getAdvertisingMultiaddrs()) } } } From 4e4a9988a3da85eb73b4caa4646ed5ea9c86248a Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Sat, 25 Apr 2020 18:08:41 +0200 Subject: [PATCH 097/102] chore: update api docs --- doc/API.md | 97 +++++++++++++++++++++++++++++++++------- doc/CONFIGURATION.md | 11 +++-- doc/GETTING_STARTED.md | 2 + src/transport-manager.js | 1 - 4 files changed, 90 insertions(+), 21 deletions(-) diff --git a/doc/API.md b/doc/API.md index 07e5817eb0..4890264c60 100644 --- a/doc/API.md +++ b/doc/API.md @@ -11,9 +11,10 @@ * [`handle`](#handle) * [`unhandle`](#unhandle) * [`ping`](#ping) - * [`addressManager.listen`](#addressManagerlisten) - * [`addressManager.announce`](#addressManagerannounce) - * [`addressManager.noAnnounce`](#addressManagernoannounce) + * [`getAdvertisingMultiaddrs`](#getadvertisingmultiaddrs) + * [`addressManager.getListenMultiaddrs`](#addressmanagergetlistenmultiaddrs) + * [`addressmger.getAnnounceMultiaddrs`](#addressmanagergetannouncemultiaddrs) + * [`addressManager.getNoAnnounceMultiaddrs`](#addressmanagergetnoannouncemultiaddrs) * [`contentRouting.findProviders`](#contentroutingfindproviders) * [`contentRouting.provide`](#contentroutingprovide) * [`contentRouting.put`](#contentroutingput) @@ -66,7 +67,7 @@ Creates an instance of Libp2p. |------|------|-------------| | options | `object` | libp2p options | | options.modules | `Array` | libp2p modules to use | -| [options.addresses] | `{ listen: Array }` | Addresses to use for transport listening and to announce to the network | +| [options.addresses] | `{ listen: Array, announce: Array, noAnnounce: Array }` | Addresses for transport listening and to advertise to the network | | [options.config] | `object` | libp2p modules configuration and core configuration | | [options.connectionManager] | `object` | libp2p Connection Manager configuration | | [options.datastore] | `object` | must implement [ipfs/interface-datastore](https://github.com/ipfs/interface-datastore) (in memory datastore will be used if not provided) | @@ -363,44 +364,108 @@ Pings a given peer and get the operation's latency. const latency = await libp2p.ping(otherPeerId) ``` -### addressManager.listen +## getAdvertisingMultiaddrs -Getter for getting the addresses that the peer is using for listening on libp2p transports. +Get peer advertising multiaddrs. This computes the advertising multiaddrs of the peer by +joining the multiaddrs that libp2p transports are listening on with the announce multiaddrs +provided in hte libp2p config. No announce multiaddrs will be filtered out, even when +using random ports in the provided multiaddrs. -`libp2p.addressManager.listen` +`libp2p.getAdvertisingMultiaddrs()` + +#### Returns + +| Type | Description | +|------|-------------| +| `Array` | Advertising multiaddrs | + +#### Example + +```js +// ... +const listenMa = libp2p.getAdvertisingMultiaddrs() +// [ ] +``` + +### addressManager.getListenMultiaddrs + +Get the multiaddrs that were provided for listening on libp2p transports. 
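To see how these getters relate to the `addresses` configuration, here is a minimal sketch (the transport, muxer, crypto modules and the example multiaddrs below are illustrative assumptions, not part of the API being documented):

```js
const Libp2p = require('libp2p')
const TCP = require('libp2p-tcp')
const MPLEX = require('libp2p-mplex')
const SECIO = require('libp2p-secio')

// illustrative addresses: listen on an ephemeral TCP port, announce a DNS address,
// and keep the ephemeral listen address out of what gets advertised
const node = await Libp2p.create({
  addresses: {
    listen: ['/ip4/127.0.0.1/tcp/0'],
    announce: ['/dns4/peer.io/tcp/443'],
    noAnnounce: ['/ip4/127.0.0.1/tcp/0']
  },
  modules: {
    transport: [TCP],
    streamMuxer: [MPLEX],
    connEncryption: [SECIO]
  }
})

await node.start()

// addresses provided for listening (may still contain port 0)
console.log(node.addressManager.getListenMultiaddrs().map(String))
// addresses the transports actually bound to (port 0 resolved to a real port)
console.log(node.transportManager.getAddrs().map(String))
// addresses advertised to the network (listen + announce, with noAnnounce filtered out)
console.log(node.getAdvertisingMultiaddrs().map(String))
```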
+ +`libp2p.addressManager.getListenMultiaddrs()` + +#### Returns + +| Type | Description | +|------|-------------| +| `Array` | Provided listening multiaddrs | #### Example ```js // ... -const listenAddresses = libp2p.addressManager.listen +const listenMa = libp2p.addressManager.getListenMultiaddrs() // [ ] ``` -### addressManager.announce +### addressManager.getAnnounceMultiaddrs + +Get the multiaddrs that were provided to announce to the network. + +`libp2p.addressManager.getAnnounceMultiaddrs()` + +#### Returns -Getter for getting the addresses that the peer is announcing to other peers in the network. +| Type | Description | +|------|-------------| +| `Array` | Provided announce multiaddrs | -`libp2p.addressManager.announce` +#### Example ```js // ... -const announceAddresses = libp2p.addressManager.announce +const announceMa = libp2p.addressManager.getAnnounceMultiaddrs() // [ ] ``` -### addressManager.noAnnounce +### addressManager.getNoAnnounceMultiaddrs + +Get the multiaddrs that were provided to not announce to the network. + +`libp2p.addressManager.getNoAnnounceMultiaddrs()` + +#### Returns -Getter for getting the addresses that the peer is not announcing in the network. +| Type | Description | +|------|-------------| +| `Array` | Provided noAnnounce multiaddrs | -`libp2p.addressManager.noAnnounce` +#### Example ```js // ... -const noAnnounceAddresses = libp2p.addressManager.noAnnounce +const noAnnounceMa = libp2p.addressManager.getNoAnnounceMultiaddrs() // [ ] ``` +### transportManager.getAddrs + +Get the multiaddrs that libp2p transports are using to listen on. + +`libp2p.transportManager.getAddrs()` + +#### Returns + +| Type | Description | +|------|-------------| +| `Array` | listening multiaddrs | + +#### Example + +```js +// ... +const listenMa = libp2p.transportManager.getAddrs() +// [ ] + ### contentRouting.findProviders Iterates over all content routers in series to find providers of the given key. diff --git a/doc/CONFIGURATION.md b/doc/CONFIGURATION.md index ae6d471ecc..bda3c1f924 100644 --- a/doc/CONFIGURATION.md +++ b/doc/CONFIGURATION.md @@ -204,15 +204,19 @@ Moreover, the majority of the modules can be customized via option parameters. T Besides the `modules` and `config`, libp2p allows other internal options and configurations: - `datastore`: an instance of [ipfs/interface-datastore](https://github.com/ipfs/interface-datastore/) modules. - This is used in modules such as the DHT. If it is not provided, `js-libp2p` will use an in memory datastore. -- `peerInfo`: a previously created instance of [libp2p/js-peer-info](https://github.com/libp2p/js-peer-info). +- `peerId`: a previously computed instance of [libp2p/js-peer-id](https://github.com/libp2p/js-peer-id). - This is particularly useful if you want to reuse the same `peer-id`, as well as for modules like `libp2p-delegated-content-routing`, which need a `peer-id` in their instantiation. - -TODO: Add listen/announce addresses and remove peerInfo!! +- `addresses`: an object containing `listen`, `announce` and `noAnnounce` properties with `Array`: + - `listen` addresses will be provided to the libp2p underlying transports for listening on them. + - `announce` addresses will be used to compute the advertises that the node should advertise to the network. + - `noAnnounce` addresses will be used as a filter to compute the advertises that the node should advertise to the network. ### Examples #### Basic setup +TODO: should we add to the basic setup the configuration of listen addresses? 
we should probably make it a required option? + ```js // Creating a libp2p node with: // transport: websockets + tcp @@ -535,7 +539,6 @@ const node = await Libp2p.create({ As libp2p is designed to be a modular networking library, its usage will vary based on individual project needs. We've included links to some existing project configurations for your reference, in case you wish to replicate their configuration: - - [libp2p-ipfs-nodejs](https://github.com/ipfs/js-ipfs/tree/master/src/core/runtime/libp2p-nodejs.js) - libp2p configuration used by js-ipfs when running in Node.js - [libp2p-ipfs-browser](https://github.com/ipfs/js-ipfs/tree/master/src/core/runtime/libp2p-browser.js) - libp2p configuration used by js-ipfs when running in a Browser (that supports WebRTC) diff --git a/doc/GETTING_STARTED.md b/doc/GETTING_STARTED.md index b05cc4f6e7..9c1ddbc01d 100644 --- a/doc/GETTING_STARTED.md +++ b/doc/GETTING_STARTED.md @@ -136,6 +136,8 @@ If you want to know more about libp2p stream multiplexing, you should read the f Now that you have configured a [**Transport**][transport], [**Crypto**][crypto] and [**Stream Multiplexer**](streamMuxer) module, you can start your libp2p node. We can start and stop libp2p using the [`libp2p.start()`](./API.md#start) and [`libp2p.stop()`](./API.md#stop) methods. +TODO: add listen addresses here? + ```js const Libp2p = require('libp2p') const WebSockets = require('libp2p-websockets') diff --git a/src/transport-manager.js b/src/transport-manager.js index d0aae70538..7bb751f6e1 100644 --- a/src/transport-manager.js +++ b/src/transport-manager.js @@ -128,7 +128,6 @@ class TransportManager { /** * Starts listeners for each listen Multiaddr. - * Update listen multiaddrs of the Address Manager after the operation. * @async */ async listen () { From fd618b6e473eb1c75a098cc3dad17ecba9468a28 Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Tue, 28 Apr 2020 13:42:27 +0000 Subject: [PATCH 098/102] chore(deps-dev): bump datastore-level from 0.14.1 to 1.0.0 Bumps [datastore-level](https://github.com/ipfs/js-datastore-level) from 0.14.1 to 1.0.0. 
- [Release notes](https://github.com/ipfs/js-datastore-level/releases) - [Changelog](https://github.com/ipfs/js-datastore-level/blob/master/CHANGELOG.md) - [Commits](https://github.com/ipfs/js-datastore-level/compare/v0.14.1...v1.0.0) Signed-off-by: dependabot-preview[bot] --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 43ce186a25..776c32de74 100644 --- a/package.json +++ b/package.json @@ -52,7 +52,7 @@ "chai": "^4.2.0", "chai-string": "^1.5.0", "datastore-fs": "^0.9.0", - "datastore-level": "^0.14.0", + "datastore-level": "^1.0.0", "dirty-chai": "^2.0.1", "level": "^6.0.0", "multihashes": "^0.4.15", From 21b39ebcd0bb9d3ae02e0aca5ae8c5fafb86f8d8 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Tue, 28 Apr 2020 16:21:25 +0200 Subject: [PATCH 099/102] chore: rename address functions and apply suggestions from code review Co-Authored-By: Jacob Heun --- doc/API.md | 32 ++++++++++----------- doc/CONFIGURATION.md | 4 +-- doc/GETTING_STARTED.md | 11 +++++-- src/address-manager/README.md | 12 ++++---- src/address-manager/index.js | 8 +++--- src/circuit/index.js | 2 +- src/identify/index.js | 16 +++++------ src/index.js | 26 ++--------------- src/transport-manager.js | 2 +- test/addresses/address-manager.spec.js | 6 ++-- test/addresses/addresses.node.js | 40 +++++--------------------- test/core/ping.node.js | 4 +-- test/dialing/direct.node.js | 6 ++-- test/dialing/relay.node.js | 4 +-- test/identify/index.spec.js | 12 ++++---- test/peer-discovery/index.node.js | 4 +-- test/pubsub/implementations.node.js | 2 +- test/pubsub/operation.node.js | 4 +-- test/utils/creators/peer.js | 2 +- 19 files changed, 78 insertions(+), 119 deletions(-) diff --git a/doc/API.md b/doc/API.md index 4890264c60..df331fe3ac 100644 --- a/doc/API.md +++ b/doc/API.md @@ -11,10 +11,10 @@ * [`handle`](#handle) * [`unhandle`](#unhandle) * [`ping`](#ping) - * [`getAdvertisingMultiaddrs`](#getadvertisingmultiaddrs) - * [`addressManager.getListenMultiaddrs`](#addressmanagergetlistenmultiaddrs) - * [`addressmger.getAnnounceMultiaddrs`](#addressmanagergetannouncemultiaddrs) - * [`addressManager.getNoAnnounceMultiaddrs`](#addressmanagergetnoannouncemultiaddrs) + * [`multiaddrs`](#multiaddrs) + * [`addressManager.getListenAddrs`](#addressmanagergetlistenaddrs) + * [`addressmger.getAnnounceAddrs`](#addressmanagergetannounceaddrs) + * [`addressManager.getNoAnnounceAddrs`](#addressmanagergetnoannounceaddrs) * [`contentRouting.findProviders`](#contentroutingfindproviders) * [`contentRouting.provide`](#contentroutingprovide) * [`contentRouting.put`](#contentroutingput) @@ -364,14 +364,14 @@ Pings a given peer and get the operation's latency. const latency = await libp2p.ping(otherPeerId) ``` -## getAdvertisingMultiaddrs +## multiaddrs Get peer advertising multiaddrs. This computes the advertising multiaddrs of the peer by joining the multiaddrs that libp2p transports are listening on with the announce multiaddrs provided in hte libp2p config. No announce multiaddrs will be filtered out, even when using random ports in the provided multiaddrs. -`libp2p.getAdvertisingMultiaddrs()` +`libp2p.multiaddrs` #### Returns @@ -383,15 +383,15 @@ using random ports in the provided multiaddrs. ```js // ... -const listenMa = libp2p.getAdvertisingMultiaddrs() +const listenMa = libp2p.multiaddrs // [ ] ``` -### addressManager.getListenMultiaddrs +### addressManager.getListenAddrs Get the multiaddrs that were provided for listening on libp2p transports. 
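In practice, `libp2p.multiaddrs` is the value you hand to other peers so they can dial this node. A short, hedged sketch (it assumes two already-started libp2p instances, `nodeA` and `nodeB`, which are illustrative and not part of these docs):

```js
// what nodeB was configured to listen on (may include port 0)
console.log(nodeB.addressManager.getListenAddrs().map(String))
// what nodeB actually advertises once started
console.log(nodeB.multiaddrs.map(String))

// seed nodeA's address book with nodeB's advertised addresses, then dial by peer id
nodeA.peerStore.addressBook.set(nodeB.peerId, nodeB.multiaddrs)
const connection = await nodeA.dial(nodeB.peerId)
console.log(connection.remotePeer.toB58String())
```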
-`libp2p.addressManager.getListenMultiaddrs()` +`libp2p.addressManager.getListenAddrs()` #### Returns @@ -403,15 +403,15 @@ Get the multiaddrs that were provided for listening on libp2p transports. ```js // ... -const listenMa = libp2p.addressManager.getListenMultiaddrs() +const listenMa = libp2p.addressManager.getListenAddrs() // [ ] ``` -### addressManager.getAnnounceMultiaddrs +### addressManager.getAnnounceAddrs Get the multiaddrs that were provided to announce to the network. -`libp2p.addressManager.getAnnounceMultiaddrs()` +`libp2p.addressManager.getAnnounceAddrs()` #### Returns @@ -423,15 +423,15 @@ Get the multiaddrs that were provided to announce to the network. ```js // ... -const announceMa = libp2p.addressManager.getAnnounceMultiaddrs() +const announceMa = libp2p.addressManager.getAnnounceAddrs() // [ ] ``` -### addressManager.getNoAnnounceMultiaddrs +### addressManager.getNoAnnounceAddrs Get the multiaddrs that were provided to not announce to the network. -`libp2p.addressManager.getNoAnnounceMultiaddrs()` +`libp2p.addressManager.getNoAnnounceAddrs()` #### Returns @@ -443,7 +443,7 @@ Get the multiaddrs that were provided to not announce to the network. ```js // ... -const noAnnounceMa = libp2p.addressManager.getNoAnnounceMultiaddrs() +const noAnnounceMa = libp2p.addressManager.getNoAnnounceAddrs() // [ ] ``` diff --git a/doc/CONFIGURATION.md b/doc/CONFIGURATION.md index bda3c1f924..fe1c10f256 100644 --- a/doc/CONFIGURATION.md +++ b/doc/CONFIGURATION.md @@ -204,7 +204,7 @@ Moreover, the majority of the modules can be customized via option parameters. T Besides the `modules` and `config`, libp2p allows other internal options and configurations: - `datastore`: an instance of [ipfs/interface-datastore](https://github.com/ipfs/interface-datastore/) modules. - This is used in modules such as the DHT. If it is not provided, `js-libp2p` will use an in memory datastore. -- `peerId`: a previously computed instance of [libp2p/js-peer-id](https://github.com/libp2p/js-peer-id). +- `peerId`: the identity of the node, an instance of [libp2p/js-peer-id](https://github.com/libp2p/js-peer-id). - This is particularly useful if you want to reuse the same `peer-id`, as well as for modules like `libp2p-delegated-content-routing`, which need a `peer-id` in their instantiation. - `addresses`: an object containing `listen`, `announce` and `noAnnounce` properties with `Array`: - `listen` addresses will be provided to the libp2p underlying transports for listening on them. @@ -215,8 +215,6 @@ Besides the `modules` and `config`, libp2p allows other internal options and con #### Basic setup -TODO: should we add to the basic setup the configuration of listen addresses? we should probably make it a required option? - ```js // Creating a libp2p node with: // transport: websockets + tcp diff --git a/doc/GETTING_STARTED.md b/doc/GETTING_STARTED.md index 9c1ddbc01d..c9a772de00 100644 --- a/doc/GETTING_STARTED.md +++ b/doc/GETTING_STARTED.md @@ -136,8 +136,6 @@ If you want to know more about libp2p stream multiplexing, you should read the f Now that you have configured a [**Transport**][transport], [**Crypto**][crypto] and [**Stream Multiplexer**](streamMuxer) module, you can start your libp2p node. We can start and stop libp2p using the [`libp2p.start()`](./API.md#start) and [`libp2p.stop()`](./API.md#stop) methods. -TODO: add listen addresses here? 
- 
 ```js
 const Libp2p = require('libp2p')
 const WebSockets = require('libp2p-websockets')
@@ -145,6 +143,9 @@ const SECIO = require('libp2p-secio')
 const MPLEX = require('libp2p-mplex')
 
 const node = await Libp2p.create({
+  addresses: {
+    listen: ['/ip4/127.0.0.1/tcp/8000/ws']
+  },
   modules: {
     transport: [WebSockets],
     connEncryption: [SECIO],
@@ -156,6 +157,12 @@ const node = await Libp2p.create({
 await node.start()
 console.log('libp2p has started')
 
+const listenAddrs = node.transportManager.getAddrs()
+console.log('libp2p is listening on the following addresses: ', listenAddrs)
+
+const advertiseAddrs = node.multiaddrs
+console.log('libp2p is advertising the following addresses: ', advertiseAddrs)
+
 // stop libp2p
 await node.stop()
 console.log('libp2p has stopped')
diff --git a/src/address-manager/README.md b/src/address-manager/README.md
index 6cceff6a16..53f9324802 100644
--- a/src/address-manager/README.md
+++ b/src/address-manager/README.md
@@ -8,9 +8,9 @@ These Addresses should be specified in your libp2p [configuration](../../doc/CON
 
 A libp2p node should have a set of listen addresses, which will be used by libp2p underlying transports to listen for dials from other nodes in the network.
 
-Before a libp2p node starts, a set of listen addresses should be provided to the AddressManager, so that when the node is started, the libp2p transports can use them to listen for connections. Accordingly, listen addresses should be specified through the libp2p configuration, in order to have the `AddressManager` created with them.
+Before a libp2p node starts, its configured listen addresses will be passed to the AddressManager, so that during startup the libp2p transports can use them to listen for connections. Accordingly, listen addresses should be specified through the libp2p configuration, in order to have the `AddressManager` created with them.
 
-It is important pointing out that libp2p accepts to listen on addresses that intend to rely on any available local port. In this context, the provided listen addresses might not be exactly the same as the ones used by the transports. For example tcp may replace `/ip4/0.0.0.0/tcp/0` with something like `/ip4/0.0.0.0/tcp/8989`. As a consequence, libp2p should take into account this when advertising its addresses.
+It is important to point out that libp2p accepts ephemeral listening addresses. In this context, the provided listen addresses might not be exactly the same as the ones used by the transports. For example TCP may replace `/ip4/0.0.0.0/tcp/0` with something like `/ip4/127.0.0.1/tcp/8989`. As a consequence, libp2p should take this into account when determining its advertised addresses.
 
 ## Announce Addresses
 
@@ -22,13 +22,13 @@ Scenarios for Announce Addresses include:
 
 ## No Announce Addresses
 
-While we need to add Announce Addresses to enable peers' connectivity, we can also not announce addresses that will not be reachable. This way, No Announce Addresses should be specified so that they are not announced by the peer as addresses that other peers can use to dial it.
+While we need to add Announce Addresses to enable peers' connectivity, we should also avoid announcing addresses that will not be reachable. No Announce Addresses should be specified so that they are filtered from the advertised multiaddrs.
 
-As stated into the Listen Addresses section, Listen Addresses might get modified after libp2p transports get in action and use them to listen for new connections. 
This way, libp2p should also take into account these changes so that they can be matched when No Announce Addresses are being filtered out for advertising addresses.
+As stated in the Listen Addresses section, Listen Addresses might be modified by libp2p transports after they successfully bind to those addresses. Libp2p should also take these changes into account so that they can be matched when No Announce Addresses are being filtered out of the advertised multiaddrs.
 
 ## Implementation
 
-When a libp2p node is created, the Address Manager will be populated from the provided addresses through the libp2p configuration. Once the node is started, the Transport Manager component will gather the listen addresses from the Address Manager, so that the libp2p transports use them to listen on.
+When a libp2p node is created, the Address Manager will be populated from the provided addresses through the libp2p configuration. Once the node is started, the Transport Manager component will gather the listen addresses from the Address Manager, so that the libp2p transports can attempt to bind to them. Libp2p will use the Address Manager as the source of truth when advertising the peer's addresses.
 
 After all transports are ready, other libp2p components/subsystems will kickoff, namely the Identify Service and the DHT. Both of them will announce the node addresses to the other peers in the network. The announce and noAnnounce addresses will have an important role here and will be gathered by libp2p to compute its current addresses to advertise everytime it is needed.
 
@@ -40,7 +40,7 @@ In a future iteration, we can enable these addresses to be modified in runtime.
 
 #### Modify Listen Addresses
 
-While adding new addresses to listen on runtime is a feasible operation, removing one listen address might have bad implications for the node, since all the connections using that listen address will be closed. With this in mind and taking also into consideration the lack of good use cases for removing listen addresses, the Address Manager API only allows libp2p users to add new Listen Addresses on runtime.
+While adding new addresses to listen on at runtime should be trivial, removing a listen address might have bad implications for the node, since all the connections using that listen address will be closed. However, libp2p should provide a mechanism for both adding and removing listen addresses in the future.
 
 Every time a new listen address is added, the Address Manager should emit an event with the new multiaddrs to listen. The Transport Manager should listen to this events and act accordingly.
 
diff --git a/src/address-manager/index.js b/src/address-manager/index.js
index 972f6fb2bf..e15279367b 100644
--- a/src/address-manager/index.js
+++ b/src/address-manager/index.js
@@ -7,7 +7,7 @@ log.error = debug('libp2p:addresses:error')
 const multiaddr = require('multiaddr')
 
 /**
- * Responsible for managing the peer addresses.
+ * Responsible for managing this peer's addresses.
  * Peers can specify their listen, announce and noAnnounce addresses.
  * The listen addresses will be used by the libp2p transports to listen for new connections,
  * while the announce an noAnnounce addresses will be combined with the listen addresses for
@@ -31,7 +31,7 @@ class AddressManager {
    * Get peer listen multiaddrs.
    * @return {Array}
    */
-  getListenMultiaddrs () {
+  getListenAddrs () {
    return Array.from(this.listen).map((a) => multiaddr(a))
   }
 
   /**
   * Get peer announcing multiaddrs. 
* @return {Array} */ - getAnnounceMultiaddrs () { + getAnnounceAddrs () { return Array.from(this.announce).map((a) => multiaddr(a)) } @@ -47,7 +47,7 @@ class AddressManager { * Get peer noAnnouncing multiaddrs. * @return {Array} */ - getNoAnnounceMultiaddrs () { + getNoAnnounceAddrs () { return Array.from(this.noAnnounce).map((a) => multiaddr(a)) } } diff --git a/src/circuit/index.js b/src/circuit/index.js index 637f5f86f1..0e7c9fe322 100644 --- a/src/circuit/index.js +++ b/src/circuit/index.js @@ -122,7 +122,7 @@ class Circuit { type: CircuitPB.Type.HOP, srcPeer: { id: this.peerId.toBytes(), - addrs: this.addressManager.getListenMultiaddrs().map(addr => addr.buffer) + addrs: this.addressManager.getListenAddrs().map(addr => addr.buffer) }, dstPeer: { id: destinationPeer.toBytes(), diff --git a/src/identify/index.js b/src/identify/index.js index 604f10ed5b..a89e84bdfa 100644 --- a/src/identify/index.js +++ b/src/identify/index.js @@ -48,16 +48,16 @@ class IdentifyService { * @param {Libp2p} options.libp2p * @param {Map} options.protocols A reference to the protocols we support */ - constructor (options) { + constructor ({ libp2p, protocols }) { /** * @property {PeerStore} */ - this.peerStore = options.libp2p.peerStore + this.peerStore = libp2p.peerStore /** * @property {ConnectionManager} */ - this.connectionManager = options.libp2p.connectionManager + this.connectionManager = libp2p.connectionManager this.connectionManager.on('peer:connect', (connection) => { const peerId = connection.remotePeer @@ -68,14 +68,14 @@ class IdentifyService { /** * @property {PeerId} */ - this.peerId = options.libp2p.peerId + this.peerId = libp2p.peerId /** * @property {AddressManager} */ - this._libp2p = options.libp2p + this._libp2p = libp2p - this._protocols = options.protocols + this._protocols = protocols this.handleMessage = this.handleMessage.bind(this) } @@ -92,7 +92,7 @@ class IdentifyService { await pipe( [{ - listenAddrs: this._libp2p.getAdvertisingMultiaddrs().map((ma) => ma.buffer), + listenAddrs: this._libp2p.multiaddrs.map((ma) => ma.buffer), protocols: Array.from(this._protocols.keys()) }], pb.encode(Message), @@ -217,7 +217,7 @@ class IdentifyService { protocolVersion: PROTOCOL_VERSION, agentVersion: AGENT_VERSION, publicKey, - listenAddrs: this._libp2p.getAdvertisingMultiaddrs().map((ma) => ma.buffer), + listenAddrs: this._libp2p.multiaddrs.map((ma) => ma.buffer), observedAddr: connection.remoteAddr.buffer, protocols: Array.from(this._protocols.keys()) }) diff --git a/src/index.js b/src/index.js index 40b4f43a78..6e0eb60e28 100644 --- a/src/index.js +++ b/src/index.js @@ -297,25 +297,15 @@ class Libp2p extends EventEmitter { * Get peer advertising multiaddrs by concating the addresses used * by transports to listen with the announce addresses. * Duplicated addresses and noAnnounce addresses are filtered out. - * This takes into account random ports on matching noAnnounce addresses. 
* @return {Array} */ - getAdvertisingMultiaddrs () { + get multiaddrs () { // Filter noAnnounce multiaddrs - const filterMa = this.addressManager.getNoAnnounceMultiaddrs() - - // Special filter for noAnnounce addresses using a random port - // eg /ip4/0.0.0.0/tcp/0 => /ip4/192.168.1.0/tcp/58751 - const filterSpecial = filterMa - .map((ma) => ({ - protos: ma.protos(), - ...ma.toOptions() - })) - .filter((op) => op.port === 0) + const filterMa = this.addressManager.getNoAnnounceAddrs() // Create advertising list return this.transportManager.getAddrs() - .concat(this.addressManager.getAnnounceMultiaddrs()) + .concat(this.addressManager.getAnnounceAddrs()) .filter((ma, index, array) => { // Filter out if repeated if (array.findIndex((otherMa) => otherMa.equals(ma)) !== index) { @@ -327,16 +317,6 @@ class Libp2p extends EventEmitter { return false } - // Filter out if in the special filter - const options = ma.toOptions() - if (filterSpecial.find((op) => - op.family === options.family && - op.host === options.host && - op.transport === options.transport && - op.protos.length === ma.protos().length - )) { - return false - } return true }) } diff --git a/src/transport-manager.js b/src/transport-manager.js index 7bb751f6e1..b1994938fb 100644 --- a/src/transport-manager.js +++ b/src/transport-manager.js @@ -131,7 +131,7 @@ class TransportManager { * @async */ async listen () { - const addrs = this.libp2p.addressManager.getListenMultiaddrs() + const addrs = this.libp2p.addressManager.getListenAddrs() if (addrs.length === 0) { log('no addresses were provided for listening, this node is dial only') diff --git a/test/addresses/address-manager.spec.js b/test/addresses/address-manager.spec.js index 4ac35f79db..3e0d0efdc3 100644 --- a/test/addresses/address-manager.spec.js +++ b/test/addresses/address-manager.spec.js @@ -32,7 +32,7 @@ describe('Address Manager', () => { expect(am.announce.size).to.equal(0) expect(am.noAnnounce.size).to.equal(0) - const listenMultiaddrs = am.getListenMultiaddrs() + const listenMultiaddrs = am.getListenAddrs() expect(listenMultiaddrs.length).to.equal(2) expect(listenMultiaddrs[0].equals(multiaddr(listenAddresses[0]))).to.equal(true) expect(listenMultiaddrs[1].equals(multiaddr(listenAddresses[1]))).to.equal(true) @@ -48,7 +48,7 @@ describe('Address Manager', () => { expect(am.announce.size).to.equal(announceAddreses.length) expect(am.noAnnounce.size).to.equal(0) - const announceMultiaddrs = am.getAnnounceMultiaddrs() + const announceMultiaddrs = am.getAnnounceAddrs() expect(announceMultiaddrs.length).to.equal(1) expect(announceMultiaddrs[0].equals(multiaddr(announceAddreses[0]))).to.equal(true) }) @@ -63,7 +63,7 @@ describe('Address Manager', () => { expect(am.announce.size).to.equal(0) expect(am.noAnnounce.size).to.equal(listenAddresses.length) - const noAnnounceMultiaddrs = am.getNoAnnounceMultiaddrs() + const noAnnounceMultiaddrs = am.getNoAnnounceAddrs() expect(noAnnounceMultiaddrs.length).to.equal(2) expect(noAnnounceMultiaddrs[0].equals(multiaddr(listenAddresses[0]))).to.equal(true) expect(noAnnounceMultiaddrs[1].equals(multiaddr(listenAddresses[1]))).to.equal(true) diff --git a/test/addresses/addresses.node.js b/test/addresses/addresses.node.js index 0526d38804..8993fe1c54 100644 --- a/test/addresses/addresses.node.js +++ b/test/addresses/addresses.node.js @@ -13,7 +13,7 @@ const peerUtils = require('../utils/creators/peer') const listenAddresses = ['/ip4/127.0.0.1/tcp/0', '/ip4/127.0.0.1/tcp/8000/ws'] const announceAddreses = ['/dns4/peer.io'] 
-describe('libp2p.getAdvertisingMultiaddrs', () => { +describe('libp2p.multiaddrs', () => { let libp2p afterEach(() => libp2p && libp2p.stop()) @@ -58,12 +58,12 @@ describe('libp2p.getAdvertisingMultiaddrs', () => { const tmListen = libp2p.transportManager.getAddrs().map((ma) => ma.toString()) - const spyAnnounce = sinon.spy(libp2p.addressManager, 'getAnnounceMultiaddrs') - const spyNoAnnounce = sinon.spy(libp2p.addressManager, 'getNoAnnounceMultiaddrs') - const spyListen = sinon.spy(libp2p.addressManager, 'getListenMultiaddrs') + const spyAnnounce = sinon.spy(libp2p.addressManager, 'getAnnounceAddrs') + const spyNoAnnounce = sinon.spy(libp2p.addressManager, 'getNoAnnounceAddrs') + const spyListen = sinon.spy(libp2p.addressManager, 'getListenAddrs') const spyTranspMgr = sinon.spy(libp2p.transportManager, 'getAddrs') - const advertiseMultiaddrs = libp2p.getAdvertisingMultiaddrs().map((ma) => ma.toString()) + const advertiseMultiaddrs = libp2p.multiaddrs.map((ma) => ma.toString()) expect(spyAnnounce).to.have.property('callCount', 1) expect(spyNoAnnounce).to.have.property('callCount', 1) @@ -92,7 +92,7 @@ describe('libp2p.getAdvertisingMultiaddrs', () => { } }) - const advertiseMultiaddrs = libp2p.getAdvertisingMultiaddrs().map((ma) => ma.toString()) + const advertiseMultiaddrs = libp2p.multiaddrs.map((ma) => ma.toString()) // Announce 2 listen (transport), ignoring duplicated in announce expect(advertiseMultiaddrs.length).to.equal(2) @@ -111,33 +111,7 @@ describe('libp2p.getAdvertisingMultiaddrs', () => { } }) - const advertiseMultiaddrs = libp2p.getAdvertisingMultiaddrs().map((ma) => ma.toString()) - - // Announce 1 listen (transport) not in the noAnnounce and the announce - expect(advertiseMultiaddrs.length).to.equal(2) - - announceAddreses.forEach((m) => { - expect(advertiseMultiaddrs).to.include(m) - }) - noAnnounce.forEach((m) => { - expect(advertiseMultiaddrs).to.not.include(m) - }) - }) - - it('should not advertise noAnnounce addresses with random port switch', async () => { - const noAnnounce = [listenAddresses[0]] - ;[libp2p] = await peerUtils.createPeer({ - config: { - ...AddressesOptions, - addresses: { - listen: listenAddresses, - announce: announceAddreses, - noAnnounce - } - } - }) - - const advertiseMultiaddrs = libp2p.getAdvertisingMultiaddrs().map((ma) => ma.toString()) + const advertiseMultiaddrs = libp2p.multiaddrs.map((ma) => ma.toString()) // Announce 1 listen (transport) not in the noAnnounce and the announce expect(advertiseMultiaddrs.length).to.equal(2) diff --git a/test/core/ping.node.js b/test/core/ping.node.js index 99e57d6095..439fac7427 100644 --- a/test/core/ping.node.js +++ b/test/core/ping.node.js @@ -21,8 +21,8 @@ describe('ping', () => { config: baseOptions }) - nodes[0].peerStore.addressBook.set(nodes[1].peerId, nodes[1].getAdvertisingMultiaddrs()) - nodes[1].peerStore.addressBook.set(nodes[0].peerId, nodes[0].getAdvertisingMultiaddrs()) + nodes[0].peerStore.addressBook.set(nodes[1].peerId, nodes[1].multiaddrs) + nodes[1].peerStore.addressBook.set(nodes[0].peerId, nodes[0].multiaddrs) }) it('ping once from peer0 to peer1', async () => { diff --git a/test/dialing/direct.node.js b/test/dialing/direct.node.js index 9ed3b5d0ff..ba47d86e83 100644 --- a/test/dialing/direct.node.js +++ b/test/dialing/direct.node.js @@ -281,7 +281,7 @@ describe('Dialing (direct, TCP)', () => { }) sinon.spy(libp2p.dialer, 'connectToPeer') - libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.getAdvertisingMultiaddrs()) + libp2p.peerStore.addressBook.set(remotePeerId, 
remoteLibp2p.multiaddrs) const connection = await libp2p.dial(remotePeerId) expect(connection).to.exist() @@ -363,7 +363,7 @@ describe('Dialing (direct, TCP)', () => { const fullAddress = remoteAddr.encapsulate(`/p2p/${remoteLibp2p.peerId.toB58String()}`) - libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.getAdvertisingMultiaddrs()) + libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.multiaddrs) const dialResults = await Promise.all([...new Array(dials)].map((_, index) => { if (index % 2 === 0) return libp2p.dial(remoteLibp2p.peerId) return libp2p.dial(fullAddress) @@ -393,7 +393,7 @@ describe('Dialing (direct, TCP)', () => { const error = new Error('Boom') sinon.stub(libp2p.transportManager, 'dial').callsFake(() => Promise.reject(error)) - libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.getAdvertisingMultiaddrs()) + libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.multiaddrs) const dialResults = await pSettle([...new Array(dials)].map((_, index) => { if (index % 2 === 0) return libp2p.dial(remoteLibp2p.peerId) return libp2p.dial(remoteAddr) diff --git a/test/dialing/relay.node.js b/test/dialing/relay.node.js index 137a203737..1ffd75be87 100644 --- a/test/dialing/relay.node.js +++ b/test/dialing/relay.node.js @@ -154,13 +154,13 @@ describe('Dialing (via relay, TCP)', () => { // Connect the destination peer and the relay const tcpAddrs = dstLibp2p.transportManager.getAddrs() - sinon.stub(dstLibp2p.addressManager, 'getListenMultiaddrs').returns([multiaddr(`${relayAddr}/p2p-circuit`)]) + sinon.stub(dstLibp2p.addressManager, 'getListenAddrs').returns([multiaddr(`${relayAddr}/p2p-circuit`)]) await dstLibp2p.transportManager.listen() expect(dstLibp2p.transportManager.getAddrs()).to.have.deep.members([...tcpAddrs, dialAddr.decapsulate('p2p')]) // Tamper with the our multiaddrs for the circuit message - sinon.stub(srcLibp2p.addressManager, 'getListenMultiaddrs').returns([{ + sinon.stub(srcLibp2p.addressManager, 'getListenAddrs').returns([{ buffer: Buffer.from('an invalid multiaddr') }]) diff --git a/test/identify/index.spec.js b/test/identify/index.spec.js index b0db66756b..9ca892ff05 100644 --- a/test/identify/index.spec.js +++ b/test/identify/index.spec.js @@ -55,7 +55,7 @@ describe('Identify', () => { set: () => { } } }, - getAdvertisingMultiaddrs: () => [] + multiaddrs: [] }, protocols }) @@ -63,7 +63,7 @@ describe('Identify', () => { libp2p: { peerId: remotePeer, connectionManager: new EventEmitter(), - getAdvertisingMultiaddrs: () => [] + multiaddrs: [] }, protocols }) @@ -108,7 +108,7 @@ describe('Identify', () => { set: () => { } } }, - getAdvertisingMultiaddrs: () => [] + multiaddrs: [] }, protocols }) @@ -116,7 +116,7 @@ describe('Identify', () => { libp2p: { peerId: remotePeer, connectionManager: new EventEmitter(), - getAdvertisingMultiaddrs: () => [] + multiaddrs: [] }, protocols }) @@ -153,7 +153,7 @@ describe('Identify', () => { libp2p: { peerId: localPeer, connectionManager: new EventEmitter(), - getAdvertisingMultiaddrs: () => [listeningAddr] + multiaddrs: [listeningAddr] }, protocols: new Map([ [multicodecs.IDENTIFY], @@ -173,7 +173,7 @@ describe('Identify', () => { set: () => { } } }, - getAdvertisingMultiaddrs: () => [] + multiaddrs: [] } }) diff --git a/test/peer-discovery/index.node.js b/test/peer-discovery/index.node.js index 732afec6e1..12dd4b67d1 100644 --- a/test/peer-discovery/index.node.js +++ b/test/peer-discovery/index.node.js @@ -177,8 +177,8 @@ describe('peer discovery scenarios', () => { remoteLibp2p2.start() ]) - 
libp2p.peerStore.addressBook.set(remotePeerId1, remoteLibp2p1.getAdvertisingMultiaddrs()) - remoteLibp2p2.peerStore.addressBook.set(remotePeerId1, remoteLibp2p1.getAdvertisingMultiaddrs()) + libp2p.peerStore.addressBook.set(remotePeerId1, remoteLibp2p1.multiaddrs) + remoteLibp2p2.peerStore.addressBook.set(remotePeerId1, remoteLibp2p1.multiaddrs) // Topology: // A -> B diff --git a/test/pubsub/implementations.node.js b/test/pubsub/implementations.node.js index 49310358f0..7ce32c0a68 100644 --- a/test/pubsub/implementations.node.js +++ b/test/pubsub/implementations.node.js @@ -75,7 +75,7 @@ describe('Pubsub subsystem is able to use different implementations', () => { ]) const libp2pId = libp2p.peerId.toB58String() - libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.getAdvertisingMultiaddrs()) + libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.multiaddrs) const connection = await libp2p.dialProtocol(remotePeerId, multicodec) expect(connection).to.exist() diff --git a/test/pubsub/operation.node.js b/test/pubsub/operation.node.js index 02cf24ea3a..a29ac953da 100644 --- a/test/pubsub/operation.node.js +++ b/test/pubsub/operation.node.js @@ -47,7 +47,7 @@ describe('Pubsub subsystem operates correctly', () => { remoteLibp2p.start() ]) - libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.getAdvertisingMultiaddrs()) + libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.multiaddrs) }) afterEach(() => Promise.all([ @@ -124,7 +124,7 @@ describe('Pubsub subsystem operates correctly', () => { await libp2p.start() await remoteLibp2p.start() - libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.getAdvertisingMultiaddrs()) + libp2p.peerStore.addressBook.set(remotePeerId, remoteLibp2p.multiaddrs) }) afterEach(() => Promise.all([ diff --git a/test/utils/creators/peer.js b/test/utils/creators/peer.js index db0026f48d..6e4a586597 100644 --- a/test/utils/creators/peer.js +++ b/test/utils/creators/peer.js @@ -45,7 +45,7 @@ function _populateAddressBooks (peers) { for (let i = 0; i < peers.length; i++) { for (let j = 0; j < peers.length; j++) { if (i !== j) { - peers[i].peerStore.addressBook.set(peers[j].peerId, peers[j].getAdvertisingMultiaddrs()) + peers[i].peerStore.addressBook.set(peers[j].peerId, peers[j].multiaddrs) } } } From 9e96dbc50f19b4f3912216f973e36be0d7d3596d Mon Sep 17 00:00:00 2001 From: "dependabot-preview[bot]" <27856297+dependabot-preview[bot]@users.noreply.github.com> Date: Wed, 29 Apr 2020 11:06:50 +0200 Subject: [PATCH 100/102] chore(deps-dev): bump datastore-fs from 0.9.1 to 1.0.0 (#46) Bumps [datastore-fs](https://github.com/ipfs/js-datastore-fs) from 0.9.1 to 1.0.0. 
- [Release notes](https://github.com/ipfs/js-datastore-fs/releases) - [Changelog](https://github.com/ipfs/js-datastore-fs/blob/master/CHANGELOG.md) - [Commits](https://github.com/ipfs/js-datastore-fs/compare/v0.9.1...v1.0.0) Signed-off-by: dependabot-preview[bot] Co-authored-by: dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com> --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 776c32de74..abd4a23ee1 100644 --- a/package.json +++ b/package.json @@ -51,7 +51,7 @@ "aegir": "^21.2.0", "chai": "^4.2.0", "chai-string": "^1.5.0", - "datastore-fs": "^0.9.0", + "datastore-fs": "^1.0.0", "datastore-level": "^1.0.0", "dirty-chai": "^2.0.1", "level": "^6.0.0", From d9fd726163ea2187364c049f031b8577fa9d576e Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Wed, 29 Apr 2020 15:09:32 +0200 Subject: [PATCH 101/102] chore: move to libp2p --- .gitattributes | 2 - .gitignore | 45 ------- .travis.yml | 42 ------- CHANGELOG.md | 147 ----------------------- LICENSE | 21 ---- README.md => keychain/README.md | 0 {doc => keychain/doc}/private-key.png | Bin {doc => keychain/doc}/private-key.xml | 0 package.json => keychain/package.json | 0 {src => keychain/src}/cms.js | 0 {src => keychain/src}/index.js | 0 {src => keychain/src}/keychain.js | 0 {src => keychain/src}/util.js | 0 {test => keychain/test}/browser.js | 0 {test => keychain/test}/cms-interop.js | 0 {test => keychain/test}/keychain.spec.js | 0 {test => keychain/test}/node.js | 0 {test => keychain/test}/peerid.js | 0 18 files changed, 257 deletions(-) delete mode 100644 .gitattributes delete mode 100644 .gitignore delete mode 100644 .travis.yml delete mode 100644 CHANGELOG.md delete mode 100644 LICENSE rename README.md => keychain/README.md (100%) rename {doc => keychain/doc}/private-key.png (100%) rename {doc => keychain/doc}/private-key.xml (100%) rename package.json => keychain/package.json (100%) rename {src => keychain/src}/cms.js (100%) rename {src => keychain/src}/index.js (100%) rename {src => keychain/src}/keychain.js (100%) rename {src => keychain/src}/util.js (100%) rename {test => keychain/test}/browser.js (100%) rename {test => keychain/test}/cms-interop.js (100%) rename {test => keychain/test}/keychain.spec.js (100%) rename {test => keychain/test}/node.js (100%) rename {test => keychain/test}/peerid.js (100%) diff --git a/.gitattributes b/.gitattributes deleted file mode 100644 index ef41d4faa3..0000000000 --- a/.gitattributes +++ /dev/null @@ -1,2 +0,0 @@ -*.png binary -* crlf=input diff --git a/.gitignore b/.gitignore deleted file mode 100644 index 3da57e818c..0000000000 --- a/.gitignore +++ /dev/null @@ -1,45 +0,0 @@ -docs -**/node_modules/ -**/*.log -test/repo-tests* -**/bundle.js - -# Logs -logs -*.log - -coverage -.nyc_output - -# Runtime data -pids -*.pid -*.seed - -# Directory for instrumented libs generated by jscoverage/JSCover -lib-cov - -# Coverage directory used by tools like istanbul -coverage - -# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) -.grunt - -# node-waf configuration -.lock-wscript - -build - -# Dependency directory -# https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git -node_modules - -lib -dist -test/test-data/go-ipfs-repo/LOCK -test/test-data/go-ipfs-repo/LOG -test/test-data/go-ipfs-repo/LOG.old - -# while testing npm5 -package-lock.json -yarn.lock \ No newline at end of file diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 
index 2061bd3272..0000000000 --- a/.travis.yml +++ /dev/null @@ -1,42 +0,0 @@ -language: node_js -cache: npm -stages: - - check - - test - - cov - -node_js: - - '10' - - '12' - -os: - - linux - - osx - - windows - -script: npx nyc -s npm run test:node -- --bail -after_success: npx nyc report --reporter=text-lcov > coverage.lcov && npx codecov - -jobs: - include: - - stage: check - script: - - npx aegir dep-check - - npm run lint - - - stage: test - name: chrome - addons: - chrome: stable - script: - - npx aegir test -t browser -t webworker - - - stage: test - name: firefox - addons: - firefox: latest - script: - - npx aegir test -t browser -t webworker -- --browsers FirefoxHeadless - -notifications: - email: false diff --git a/CHANGELOG.md b/CHANGELOG.md deleted file mode 100644 index f661d419b0..0000000000 --- a/CHANGELOG.md +++ /dev/null @@ -1,147 +0,0 @@ - -# [0.6.0](https://github.com/libp2p/js-libp2p-keychain/compare/v0.5.4...v0.6.0) (2019-12-18) - - - - -## [0.5.4](https://github.com/libp2p/js-libp2p-keychain/compare/v0.5.3...v0.5.4) (2019-12-18) - - - - -## [0.5.3](https://github.com/libp2p/js-libp2p-keychain/compare/v0.5.2...v0.5.3) (2019-12-18) - - - - -## [0.5.2](https://github.com/libp2p/js-libp2p-keychain/compare/v0.5.1...v0.5.2) (2019-12-02) - - - - -## [0.5.1](https://github.com/libp2p/js-libp2p-keychain/compare/v0.5.0...v0.5.1) (2019-09-25) - - - - -# [0.5.0](https://github.com/libp2p/js-libp2p-keychain/compare/v0.4.2...v0.5.0) (2019-08-16) - - -* refactor: use async/await instead of callbacks (#37) ([dda315a](https://github.com/libp2p/js-libp2p-keychain/commit/dda315a)), closes [#37](https://github.com/libp2p/js-libp2p-keychain/issues/37) - - -### BREAKING CHANGES - -* The api now uses async/await instead of callbacks. - -Co-Authored-By: Vasco Santos - - - - -## [0.4.2](https://github.com/libp2p/js-libp2p-keychain/compare/v0.4.1...v0.4.2) (2019-06-13) - - -### Bug Fixes - -* throw errors with correct stack trace ([#35](https://github.com/libp2p/js-libp2p-keychain/issues/35)) ([7051b9c](https://github.com/libp2p/js-libp2p-keychain/commit/7051b9c)) - - - - -## [0.4.1](https://github.com/libp2p/js-libp2p-keychain/compare/v0.4.0...v0.4.1) (2019-03-14) - - - - -# [0.4.0](https://github.com/libp2p/js-libp2p-keychain/compare/v0.3.6...v0.4.0) (2019-02-26) - - -### Features - -* adds support for ed25199 and secp256k1 ([#31](https://github.com/libp2p/js-libp2p-keychain/issues/31)) ([9eb11f4](https://github.com/libp2p/js-libp2p-keychain/commit/9eb11f4)) - - - - -## [0.3.6](https://github.com/libp2p/js-libp2p-keychain/compare/v0.3.5...v0.3.6) (2019-01-10) - - -### Bug Fixes - -* reduce bundle size ([#28](https://github.com/libp2p/js-libp2p-keychain/issues/28)) ([7eeed87](https://github.com/libp2p/js-libp2p-keychain/commit/7eeed87)) - - - - -## [0.3.5](https://github.com/libp2p/js-libp2p-keychain/compare/v0.3.4...v0.3.5) (2019-01-10) - - - - -## [0.3.4](https://github.com/libp2p/js-libp2p-keychain/compare/v0.3.3...v0.3.4) (2019-01-04) - - - - -## [0.3.3](https://github.com/libp2p/js-libp2p-keychain/compare/v0.3.2...v0.3.3) (2018-10-25) - - - - -## [0.3.2](https://github.com/libp2p/js-libp2p-keychain/compare/v0.3.1...v0.3.2) (2018-09-18) - - -### Bug Fixes - -* validate createKey params properly ([#26](https://github.com/libp2p/js-libp2p-keychain/issues/26)) ([8dfaab1](https://github.com/libp2p/js-libp2p-keychain/commit/8dfaab1)) - - - - -## [0.3.1](https://github.com/libp2p/js-libp2p-keychain/compare/v0.3.0...v0.3.1) (2018-01-29) - - - - -# 
[0.3.0](https://github.com/libp2p/js-libp2p-keychain/compare/v0.2.1...v0.3.0) (2018-01-29) - - -### Bug Fixes - -* deepmerge 2.0.1 fails in browser, stay with 1.5.2 ([2ce4444](https://github.com/libp2p/js-libp2p-keychain/commit/2ce4444)) - - - - -## [0.2.1](https://github.com/libp2p/js-libp2p-keychain/compare/v0.2.0...v0.2.1) (2017-12-28) - - -### Features - -* generate unique options for a key chain ([#20](https://github.com/libp2p/js-libp2p-keychain/issues/20)) ([89a451c](https://github.com/libp2p/js-libp2p-keychain/commit/89a451c)) - - - - -# 0.2.0 (2017-12-20) - - -### Bug Fixes - -* error message ([8305d20](https://github.com/libp2p/js-libp2p-keychain/commit/8305d20)) -* lint errors ([06917f7](https://github.com/libp2p/js-libp2p-keychain/commit/06917f7)) -* lint errors ([ff4f656](https://github.com/libp2p/js-libp2p-keychain/commit/ff4f656)) -* linting ([409a999](https://github.com/libp2p/js-libp2p-keychain/commit/409a999)) -* maps an IPFS hash name to its forge equivalent ([f71d3a6](https://github.com/libp2p/js-libp2p-keychain/commit/f71d3a6)), closes [#12](https://github.com/libp2p/js-libp2p-keychain/issues/12) -* more linting ([7c44c91](https://github.com/libp2p/js-libp2p-keychain/commit/7c44c91)) -* return info on removed key [#10](https://github.com/libp2p/js-libp2p-keychain/issues/10) ([f49e753](https://github.com/libp2p/js-libp2p-keychain/commit/f49e753)) - - -### Features - -* move bits from https://github.com/richardschneider/ipfs-encryption ([1a96ae8](https://github.com/libp2p/js-libp2p-keychain/commit/1a96ae8)) -* use libp2p-crypto ([#18](https://github.com/libp2p/js-libp2p-keychain/issues/18)) ([c1627a9](https://github.com/libp2p/js-libp2p-keychain/commit/c1627a9)) - - - diff --git a/LICENSE b/LICENSE deleted file mode 100644 index bbfffbf92d..0000000000 --- a/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2017 libp2p - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/README.md b/keychain/README.md similarity index 100% rename from README.md rename to keychain/README.md diff --git a/doc/private-key.png b/keychain/doc/private-key.png similarity index 100% rename from doc/private-key.png rename to keychain/doc/private-key.png diff --git a/doc/private-key.xml b/keychain/doc/private-key.xml similarity index 100% rename from doc/private-key.xml rename to keychain/doc/private-key.xml diff --git a/package.json b/keychain/package.json similarity index 100% rename from package.json rename to keychain/package.json diff --git a/src/cms.js b/keychain/src/cms.js similarity index 100% rename from src/cms.js rename to keychain/src/cms.js diff --git a/src/index.js b/keychain/src/index.js similarity index 100% rename from src/index.js rename to keychain/src/index.js diff --git a/src/keychain.js b/keychain/src/keychain.js similarity index 100% rename from src/keychain.js rename to keychain/src/keychain.js diff --git a/src/util.js b/keychain/src/util.js similarity index 100% rename from src/util.js rename to keychain/src/util.js diff --git a/test/browser.js b/keychain/test/browser.js similarity index 100% rename from test/browser.js rename to keychain/test/browser.js diff --git a/test/cms-interop.js b/keychain/test/cms-interop.js similarity index 100% rename from test/cms-interop.js rename to keychain/test/cms-interop.js diff --git a/test/keychain.spec.js b/keychain/test/keychain.spec.js similarity index 100% rename from test/keychain.spec.js rename to keychain/test/keychain.spec.js diff --git a/test/node.js b/keychain/test/node.js similarity index 100% rename from test/node.js rename to keychain/test/node.js diff --git a/test/peerid.js b/keychain/test/peerid.js similarity index 100% rename from test/peerid.js rename to keychain/test/peerid.js From c9d776a574bad0ee72e722347e266f2b59176edd Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Thu, 7 May 2020 17:45:48 +0200 Subject: [PATCH 102/102] feat: keychain in libp2p --- .aegir.js | 2 +- doc/API.md | 286 +++++++++++++++++++- doc/CONFIGURATION.md | 30 +++ package.json | 6 +- src/index.js | 44 +++- src/keychain/README.md | 68 ----- src/keychain/cms.js | 6 +- src/keychain/keychain.js | 30 +-- src/keychain/no-keychain.js | 33 +++ test/keychain/cms-interop.spec.js | 8 +- test/keychain/keychain-api.spec.js | 394 ++++++++++++++++++++++++++++ test/keychain/keychain.spec.js | 402 ++--------------------------- 12 files changed, 832 insertions(+), 477 deletions(-) create mode 100644 src/keychain/no-keychain.js create mode 100644 test/keychain/keychain-api.spec.js diff --git a/.aegir.js b/.aegir.js index 08fb705e8e..18d6236a58 100644 --- a/.aegir.js +++ b/.aegir.js @@ -45,7 +45,7 @@ const after = async () => { } module.exports = { - bundlesize: { maxSize: '185kB' }, + bundlesize: { maxSize: '200kB' }, hooks: { pre: before, post: after diff --git a/doc/API.md b/doc/API.md index 851983f4b7..0d1bbff06e 100644 --- a/doc/API.md +++ b/doc/API.md @@ -44,6 +44,17 @@ * [`connectionManager.get`](#connectionmanagerget) * [`connectionManager.setPeerValue`](#connectionmanagersetpeervalue) * [`connectionManager.size`](#connectionmanagersize) + * [`keychain.createKey`](#keychaincreatekey) + * [`keychain.renameKey`](#keychainrenamekey) + * [`keychain.removeKey`](#keychainremovekey) + * [`keychain.exportKey`](#keychainexportkey) + * [`keychain.importKey`](#keychainimportkey) + * [`keychain.importPeer`](#keychainimportpeer) + * [`keychain.list`](#keychainlist) + * [`keychain.findById`](#keychainfindbyid) + * 
[`keychain.findByName`](#keychainfindbyname) + * [`keychain.cms.encrypt`](#keychaincmsencrypt) + * [`keychain.cms.decrypt`](#keychaincmsdecrypt) * [`metrics.global`](#metricsglobal) * [`metrics.peers`](#metricspeers) * [`metrics.protocols`](#metricsprotocols) @@ -75,7 +86,8 @@ Creates an instance of Libp2p. | [options.connectionManager] | `object` | libp2p Connection Manager configuration | | [options.datastore] | `object` | must implement [ipfs/interface-datastore](https://github.com/ipfs/interface-datastore) (in memory datastore will be used if not provided) | | [options.dialer] | `object` | libp2p Dialer configuration -| [options.metrics] | `object` | libp2p Metrics configuration +| [options.keychain] | [`object`](./CONFIGURATION.md#setup-with-keychain) | libp2p Keychain configuration | +| [options.metrics] | `object` | libp2p Metrics configuration | | [options.peerId] | [`PeerId`][peer-id] | peerId instance (it will be created if not provided) | | [options.peerStore] | `object` | libp2p PeerStore configuration | @@ -1313,6 +1325,278 @@ libp2p.connectionManager.size // 10 ``` +### keychain.createKey + +Create a key in the keychain. + +`libp2p.keychain.createKey(name, type, size)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| name | `string` | The local key name. It cannot already exist. | +| type | `string` | One of the key types; 'rsa' | +| size | `number` | The key size in bits. | + +#### Returns + +| Type | Description | +|------|-------------| +| `Promise<{ id, name }>` | Key info object | + +#### Example + +```js +const keyInfo = await libp2p.keychain.createKey('keyTest', 'rsa', 4096) +``` + +### keychain.renameKey + +Rename a key in the keychain. + +`libp2p.keychain.renameKey(oldName, newName)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| name | `string` | The old local key name. It must already exist. | +| type | `string` | The new local key name. It must not already exist. | + +#### Returns + +| Type | Description | +|------|-------------| +| `Promise<{ id, name }>` | Key info object | + +#### Example + +```js +await libp2p.keychain.createKey('keyTest', 'rsa', 4096) +const keyInfo = await libp2p.keychain.renameKey('keyTest', 'keyNewNtest') +``` + +### keychain.removeKey + +Removes a key from the keychain. + +`libp2p.keychain.removeKey(name)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| name | `string` | The local key name. It must already exist. | + +#### Returns + +| Type | Description | +|------|-------------| +| `Promise<{ id, name }>` | Key info object | + +#### Example + +```js +await libp2p.keychain.createKey('keyTest', 'rsa', 4096) +const keyInfo = await libp2p.keychain.removeKey('keyTest') +``` + +### keychain.exportKey + +Export an existing key as a PEM encrypted PKCS #8 string. + +`libp2p.keychain.exportKey(name, password)` + +#### Parameters + +| Name | Type | Description | +|------|------|-------------| +| name | `string` | The local key name. It must already exist. | +| password | `string` | The password to use. | + +#### Returns + +| Type | Description | +|------|-------------| +| `Promise` | Key as a PEM encrypted PKCS #8 | + +#### Example + +```js +await libp2p.keychain.createKey('keyTest', 'rsa', 4096) +const pemKey = await libp2p.keychain.exportKey('keyTest', 'password123') +``` + +### keychain.importKey + +Import a new key from a PEM encoded PKCS #8 string. 
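Export and import pair naturally into a simple key backup flow. The following is a hedged sketch (the key names, password and file path are illustrative, and it assumes a Node.js environment for `fs`):

```js
const fs = require('fs').promises

// create a key and export it as a password-protected PEM (PKCS #8)
await libp2p.keychain.createKey('backup-demo', 'rsa', 2048)
const pem = await libp2p.keychain.exportKey('backup-demo', 'a-strong-backup-password')
await fs.writeFile('./backup-demo.pem', pem)

// later, possibly on another node, read the PEM back and import it under a new name
const restored = await fs.readFile('./backup-demo.pem', 'utf8')
const keyInfo = await libp2p.keychain.importKey('backup-demo-restored', restored, 'a-strong-backup-password')
console.log(keyInfo) // { id, name: 'backup-demo-restored' }
```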
+
+`libp2p.keychain.importKey(name, pem, password)`
+
+#### Parameters
+
+| Name | Type | Description |
+|------|------|-------------|
+| name | `string` | The local key name. It must not exist. |
+| pem | `string` | The PEM encoded PKCS #8 string. |
+| password | `string` | The password to use. |
+
+#### Returns
+
+| Type | Description |
+|------|-------------|
+| `Promise<{ id, name }>` | Key info object |
+
+#### Example
+
+```js
+await libp2p.keychain.createKey('keyTest', 'rsa', 4096)
+const pemKey = await libp2p.keychain.exportKey('keyTest', 'password123')
+const keyInfo = await libp2p.keychain.importKey('keyTestImport', pemKey, 'password123')
+```
+
+### keychain.importPeer
+
+Import a new key from a PeerId.
+
+`libp2p.keychain.importPeer(name, peerId)`
+
+#### Parameters
+
+| Name | Type | Description |
+|------|------|-------------|
+| name | `string` | The local key name. It must not exist. |
+| peerId | [`PeerId`][peer-id] | The PeerId whose private key should be imported. |
+
+#### Returns
+
+| Type | Description |
+|------|-------------|
+| `Promise<{ id, name }>` | Key info object |
+
+#### Example
+
+```js
+const keyInfo = await libp2p.keychain.importPeer('keyTestImport', peerId)
+```
+
+### keychain.list
+
+List all the keys.
+
+`libp2p.keychain.list()`
+
+#### Returns
+
+| Type | Description |
+|------|-------------|
+| `Promise<Array<{ id, name }>>` | Array of Key info objects |
+
+#### Example
+
+```js
+const keyInfos = await libp2p.keychain.list()
+```
+
+### keychain.findById
+
+Find a key by its id.
+
+`libp2p.keychain.findById(id)`
+
+#### Parameters
+
+| Name | Type | Description |
+|------|------|-------------|
+| id | `string` | The universally unique key identifier. |
+
+#### Returns
+
+| Type | Description |
+|------|-------------|
+| `Promise<{ id, name }>` | Key info object |
+
+#### Example
+
+```js
+const keyInfo = await libp2p.keychain.createKey('keyTest', 'rsa', 4096)
+const keyInfo2 = await libp2p.keychain.findById(keyInfo.id)
+```
+
+### keychain.findByName
+
+Find a key by its name.
+
+`libp2p.keychain.findByName(name)`
+
+#### Parameters
+
+| Name | Type | Description |
+|------|------|-------------|
+| name | `string` | The local key name. |
+
+#### Returns
+
+| Type | Description |
+|------|-------------|
+| `Promise<{ id, name }>` | Key info object |
+
+#### Example
+
+```js
+const keyInfo = await libp2p.keychain.createKey('keyTest', 'rsa', 4096)
+const keyInfo2 = await libp2p.keychain.findByName('keyTest')
+```
+
+### keychain.cms.encrypt
+
+Encrypt protected data using the Cryptographic Message Syntax (CMS).
+
+`libp2p.keychain.cms.encrypt(name, data)`
+
+| Name | Type | Description |
+|------|------|-------------|
+| name | `string` | The local key name. |
+| data | `Buffer` | The data to encrypt. |
+
+#### Returns
+
+| Type | Description |
+|------|-------------|
+| `Promise<Buffer>` | Encrypted data as a PKCS #7 message in DER. |
+
+#### Example
+
+```js
+const keyInfo = await libp2p.keychain.createKey('keyTest', 'rsa', 4096)
+const enc = await libp2p.keychain.cms.encrypt('keyTest', Buffer.from('data'))
+```
+
+### keychain.cms.decrypt
+
+Decrypt protected data using the Cryptographic Message Syntax (CMS).
+The keychain must contain one of the keys used to encrypt the data. If none of the keys exists, an Error is thrown with the property 'missingKeys'.
+
+`libp2p.keychain.cms.decrypt(cmsData)`
+
+| Name | Type | Description |
+|------|------|-------------|
+| cmsData | `Buffer` | The CMS encrypted data to decrypt. |
+
+#### Returns
+
+| Type | Description |
+|------|-------------|
+| `Promise<Buffer>` | Decrypted data. |
+
+#### Example
+
+```js
+const keyInfo = await libp2p.keychain.createKey('keyTest', 'rsa', 4096)
+const enc = await libp2p.keychain.cms.encrypt('keyTest', Buffer.from('data'))
+const decryptedData = await libp2p.keychain.cms.decrypt(enc)
+```
+
 ### metrics.global
 
 A [`Stats`](#stats) object of tracking the global bandwidth of the libp2p node.
diff --git a/doc/CONFIGURATION.md b/doc/CONFIGURATION.md
index 833b4ac0b1..b9598d069b 100644
--- a/doc/CONFIGURATION.md
+++ b/doc/CONFIGURATION.md
@@ -20,6 +20,7 @@
   - [Customizing DHT](#customizing-dht)
   - [Setup with Content and Peer Routing](#setup-with-content-and-peer-routing)
   - [Setup with Relay](#setup-with-relay)
+  - [Setup with Keychain](#setup-with-keychain)
   - [Configuring Dialing](#configuring-dialing)
   - [Configuring Connection Manager](#configuring-connection-manager)
   - [Configuring Metrics](#configuring-metrics)
@@ -422,6 +423,35 @@ const node = await Libp2p.create({
 })
 ```
 
+#### Setup with Keychain
+
+Libp2p allows you to set up a secure key chain to manage your keys. The keychain configuration object should have the following properties:
+
+| Name | Type | Description |
+|------|------|-------------|
+| pass | `string` | Passphrase to use in the keychain (minimum of 20 characters). |
+| datastore | `object` | must implement [ipfs/interface-datastore](https://github.com/ipfs/interface-datastore) |
+
+```js
+const Libp2p = require('libp2p')
+const TCP = require('libp2p-tcp')
+const MPLEX = require('libp2p-mplex')
+const SECIO = require('libp2p-secio')
+const LevelStore = require('datastore-level')
+
+const node = await Libp2p.create({
+  modules: {
+    transport: [TCP],
+    streamMuxer: [MPLEX],
+    connEncryption: [SECIO]
+  },
+  keychain: {
+    pass: 'notsafepassword123456789',
+    datastore: new LevelStore('path/to/store')
+  }
+})
+```
+
 #### Configuring Dialing
 
 Dialing in libp2p can be configured to limit the rate of dialing, and how long dials are allowed to take. The below configuration example shows the default values for the dialer.
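Building on the `Setup with Keychain` example above, a minimal usage sketch of the resulting `node.keychain` (the key name and export password below are illustrative; `node` is the instance created in that example):

```js
// Keys can be managed as soon as the node is created; it does not need to be started.
const info = await node.keychain.createKey('my-key', 'rsa', 2048) // { id, name: 'my-key' }

// Export the new key as a password-protected PEM (PKCS #8) string.
const pem = await node.keychain.exportKey('my-key', 'a strong export password')

// The node's own private key is imported under the name 'self', so both keys show up here.
console.log(await node.keychain.list())
```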
diff --git a/package.json b/package.json index 517b14fbe2..be1fe40010 100644 --- a/package.json +++ b/package.json @@ -67,12 +67,14 @@ "multiaddr": "^7.4.3", "multistream-select": "^0.15.0", "mutable-proxy": "^1.0.0", + "node-forge": "^0.9.1", "p-any": "^3.0.0", "p-fifo": "^1.0.0", "p-settle": "^4.0.1", "peer-id": "^0.13.11", "protons": "^1.0.1", "retimer": "^2.0.0", + "sanitize-filename": "^1.6.3", "streaming-iterables": "^4.1.0", "timeout-abort-controller": "^1.0.0", "xsalsa20": "^1.0.2" @@ -96,6 +98,7 @@ "it-concat": "^1.0.0", "it-pair": "^1.0.0", "it-pushable": "^1.4.0", + "level": "^6.0.1", "libp2p-bootstrap": "^0.11.0", "libp2p-delegated-content-routing": "^0.5.0", "libp2p-delegated-peer-routing": "^0.5.0", @@ -103,12 +106,13 @@ "libp2p-gossipsub": "^0.4.0", "libp2p-kad-dht": "^0.19.1", "libp2p-mdns": "^0.14.1", - "libp2p-noise": "^1.1.0", "libp2p-mplex": "^0.9.5", + "libp2p-noise": "^1.1.0", "libp2p-secio": "^0.12.4", "libp2p-tcp": "^0.14.1", "libp2p-webrtc-star": "^0.18.0", "libp2p-websockets": "^0.13.1", + "multihashes": "^0.4.19", "nock": "^12.0.3", "p-defer": "^3.0.0", "p-times": "^3.0.0", diff --git a/src/index.js b/src/index.js index 6c4bab3843..0050e16e0c 100644 --- a/src/index.js +++ b/src/index.js @@ -6,6 +6,7 @@ const globalThis = require('ipfs-utils/src/globalthis') const log = debug('libp2p') log.error = debug('libp2p:error') +const { MemoryDatastore } = require('interface-datastore') const PeerId = require('peer-id') const peerRouting = require('./peer-routing') @@ -19,6 +20,8 @@ const AddressManager = require('./address-manager') const ConnectionManager = require('./connection-manager') const Circuit = require('./circuit') const Dialer = require('./dialer') +const Keychain = require('./keychain') +const NoKeychain = require('./keychain/no-keychain') const Metrics = require('./metrics') const TransportManager = require('./transport-manager') const Upgrader = require('./upgrader') @@ -44,8 +47,9 @@ class Libp2p extends EventEmitter { // and add default values where appropriate this._options = validateConfig(_options) - this.peerId = this._options.peerId this.datastore = this._options.datastore + this.keychain = this._options.keychain + this.peerId = this._options.peerId this.peerStore = (this.datastore && this._options.peerStore.persistence) ? new PersistentPeerStore({ @@ -168,6 +172,9 @@ class Libp2p extends EventEmitter { this.peerRouting = peerRouting(this) this.contentRouting = contentRouting(this) + // Keychain + this.keychain = this._options._keychain || new NoKeychain() + // Mount default protocols ping.mount(this) @@ -541,13 +548,40 @@ class Libp2p extends EventEmitter { * @returns {Libp2p} */ Libp2p.create = async function create (options = {}) { - if (options.peerId) { - return new Libp2p(options) + let peerId = options.peerId + + if (!peerId) { + peerId = await PeerId.create() + + options.peerId = peerId } - const peerId = await PeerId.create() + const keychainOptions = options.keychain || {} + + if (keychainOptions.pass) { + log('creating keychain') + + const datastore = keychainOptions.datastore || new MemoryDatastore() + const keychainOpts = Keychain.generateOptions() + + const keychain = new Keychain(datastore, { + passPhrase: keychainOptions.pass, + ...keychainOpts, + ...keychainOptions + }) + + log('keychain constructed') + + // Import the private key as 'self', if needed. 
+ try { + await keychain.findByName('self') + } catch (err) { + await keychain.importPeer('self', peerId) + } + + options._keychain = keychain + } - options.peerId = peerId return new Libp2p(options) } diff --git a/src/keychain/README.md b/src/keychain/README.md index 37829b48a7..1a5a6ce387 100644 --- a/src/keychain/README.md +++ b/src/keychain/README.md @@ -1,20 +1,7 @@ # js-libp2p-keychain -[![](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://protocol.ai) -[![](https://img.shields.io/badge/project-libp2p-yellow.svg?style=flat-square)](http://libp2p.io/) -[![](https://img.shields.io/badge/freenode-%23libp2p-yellow.svg?style=flat-square)](http://webchat.freenode.net/?channels=%23libp2p) -[![Discourse posts](https://img.shields.io/discourse/https/discuss.libp2p.io/posts.svg)](https://discuss.libp2p.io) -[![](https://img.shields.io/codecov/c/github/libp2p/js-libp2p-keychain.svg?style=flat-square)](https://codecov.io/gh/libp2p/js-libp2p-keychain) -[![](https://img.shields.io/travis/libp2p/js-libp2p-keychain.svg?style=flat-square)](https://travis-ci.com/libp2p/js-libp2p-keychain) -[![Dependency Status](https://david-dm.org/libp2p/js-libp2p-keychain.svg?style=flat-square)](https://david-dm.org/libp2p/js-libp2p-keychain) -[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat-square)](https://github.com/feross/standard) - > A secure key chain for libp2p in JavaScript -## Lead Maintainer - -[Vasco Santos](https://github.com/vasco-santos). - ## Features - Manages the lifecycle of a key @@ -26,49 +13,6 @@ - Uses PKCS 7: CMS (aka RFC 5652) to provide cryptographically protected messages - Delays reporting errors to slow down brute force attacks -## Table of Contents - -## Install - -```sh -npm install --save libp2p-keychain -``` - -### Usage - -```js -const Keychain = require('libp2p-keychain') -const FsStore = require('datastore-fs') - -const datastore = new FsStore('./a-keystore') -const opts = { - passPhrase: 'some long easily remembered phrase' -} -const keychain = new Keychain(datastore, opts) -``` - -## API - -Managing a key - -- `async createKey (name, type, size)` -- `async renameKey (oldName, newName)` -- `async removeKey (name)` -- `async exportKey (name, password)` -- `async importKey (name, pem, password)` -- `async importPeer (name, peer)` - -A naming service for a key - -- `async listKeys ()` -- `async findKeyById (id)` -- `async findKeyByName (name)` - -Cryptographically protected messages - -- `async cms.encrypt (name, plain)` -- `async cms.decrypt (cmsData)` - ### KeyInfo The key management and naming service API all return a `KeyInfo` object. The `id` is a universally unique identifier for the key. The `name` is local to the key chain. @@ -109,15 +53,3 @@ The actual physical storage of an encrypted key is left to implementations of [i ### Cryptographic Message Syntax (CMS) CMS, aka [PKCS #7](https://en.wikipedia.org/wiki/PKCS) and [RFC 5652](https://tools.ietf.org/html/rfc5652), describes an encapsulation syntax for data protection. It is used to digitally sign, digest, authenticate, or encrypt arbitrary message content. Basically, `cms.encrypt` creates a DER message that can be only be read by someone holding the private key. - -## Contribute - -Feel free to join in. All welcome. Open an [issue](https://github.com/libp2p/js-libp2p-keychain/issues)! - -This repository falls under the IPFS [Code of Conduct](https://github.com/ipfs/community/blob/master/code-of-conduct.md). 
- -[![](https://cdn.rawgit.com/jbenet/contribute-ipfs-gif/master/img/contribute.gif)](https://github.com/ipfs/community/blob/master/CONTRIBUTING.md) - -## License - -[MIT](LICENSE) diff --git a/src/keychain/cms.js b/src/keychain/cms.js index 9bec4b9451..ea35a511a9 100644 --- a/src/keychain/cms.js +++ b/src/keychain/cms.js @@ -43,7 +43,7 @@ class CMS { throw errcode(new Error('Plain data must be a Buffer'), 'ERR_INVALID_PARAMS') } - const key = await this.keychain.findKeyByName(name) + const key = await this.keychain.findByName(name) const pem = await this.keychain._getPrivateKey(name) const privateKey = forge.pki.decryptRsaPrivateKey(pem, this.keychain._()) const certificate = await certificateForKey(key, privateKey) @@ -96,7 +96,7 @@ class CMS { const r = await findAsync(recipients, async (recipient) => { try { - const key = await this.keychain.findKeyById(recipient.keyId) + const key = await this.keychain.findById(recipient.keyId) if (key) return true } catch (err) { return false @@ -111,7 +111,7 @@ class CMS { }) } - const key = await this.keychain.findKeyById(r.keyId) + const key = await this.keychain.findById(r.keyId) const pem = await this.keychain._getPrivateKey(key.name) const privateKey = forge.pki.decryptRsaPrivateKey(pem, this.keychain._()) cms.decrypt(r.recipient, privateKey) diff --git a/src/keychain/keychain.js b/src/keychain/keychain.js index aae7897224..77f2b2cd20 100644 --- a/src/keychain/keychain.js +++ b/src/keychain/keychain.js @@ -104,29 +104,29 @@ class Keychain { } this.store = store - const opts = mergeOptions(defaultOptions, options) + this.opts = mergeOptions(defaultOptions, options) // Enforce NIST SP 800-132 - if (!opts.passPhrase || opts.passPhrase.length < 20) { + if (!this.opts.passPhrase || this.opts.passPhrase.length < 20) { throw new Error('passPhrase must be least 20 characters') } - if (opts.dek.keyLength < NIST.minKeyLength) { + if (this.opts.dek.keyLength < NIST.minKeyLength) { throw new Error(`dek.keyLength must be least ${NIST.minKeyLength} bytes`) } - if (opts.dek.salt.length < NIST.minSaltLength) { + if (this.opts.dek.salt.length < NIST.minSaltLength) { throw new Error(`dek.saltLength must be least ${NIST.minSaltLength} bytes`) } - if (opts.dek.iterationCount < NIST.minIterationCount) { + if (this.opts.dek.iterationCount < NIST.minIterationCount) { throw new Error(`dek.iterationCount must be least ${NIST.minIterationCount}`) } // Create the derived encrypting key const dek = crypto.pbkdf2( - opts.passPhrase, - opts.dek.salt, - opts.dek.iterationCount, - opts.dek.keyLength, - opts.dek.hash) + this.opts.passPhrase, + this.opts.dek.salt, + this.opts.dek.iterationCount, + this.opts.dek.keyLength, + this.opts.dek.hash) Object.defineProperty(this, '_', { value: () => dek }) } @@ -229,7 +229,7 @@ class Keychain { * * @returns {KeyInfo[]} */ - async listKeys () { + async list () { const self = this const query = { prefix: infoPrefix @@ -249,9 +249,9 @@ class Keychain { * @param {string} id - The universally unique key identifier. * @returns {KeyInfo} */ - async findKeyById (id) { + async findById (id) { try { - const keys = await this.listKeys() + const keys = await this.list() return keys.find((k) => k.id === id) } catch (err) { return throwDelayed(err) @@ -264,7 +264,7 @@ class Keychain { * @param {string} name - The local key name. 
* @returns {KeyInfo} */ - async findKeyByName (name) { + async findByName (name) { if (!validateKeyName(name)) { return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME')) } @@ -290,7 +290,7 @@ class Keychain { return throwDelayed(errcode(new Error(`Invalid key name '${name}'`), 'ERR_INVALID_KEY_NAME')) } const dsname = DsName(name) - const keyInfo = await self.findKeyByName(name) + const keyInfo = await self.findByName(name) const batch = self.store.batch() batch.delete(dsname) batch.delete(DsInfoName(name)) diff --git a/src/keychain/no-keychain.js b/src/keychain/no-keychain.js new file mode 100644 index 0000000000..6e4e612a57 --- /dev/null +++ b/src/keychain/no-keychain.js @@ -0,0 +1,33 @@ +'use strict' + +function fail () { + throw new Error('Key management requires \'--pass ...\' option') +} + +class NoKeychain { + static get options () { fail() } + + static generateOptions () { fail() } + + createKey () { fail() } + + listKeys () { fail() } + + findKeyById () { fail() } + + findKeyByName () { fail() } + + renameKey () { fail() } + + removeKey () { fail() } + + exportKey () { fail() } + + importKey () { fail() } + + importPeer () { fail() } + + get cms () { fail() } +} + +module.exports = NoKeychain diff --git a/test/keychain/cms-interop.spec.js b/test/keychain/cms-interop.spec.js index 312e8113d9..8eb19f6e99 100644 --- a/test/keychain/cms-interop.spec.js +++ b/test/keychain/cms-interop.spec.js @@ -10,7 +10,7 @@ chai.use(require('chai-string')) const os = require('os') const path = require('path') -const { isBrowser } = require('ipfs-utils/src/env') +const { isNode } = require('ipfs-utils/src/env') const FsStore = require('datastore-fs') const LevelStore = require('datastore-level') @@ -22,9 +22,9 @@ describe('cms interop', () => { let ks before(() => { - const datastore = isBrowser - ? new LevelStore('test-keystore-1', { db: require('level') }) - : new FsStore(path.join(os.tmpdir(), 'test-keystore-1-' + Date.now())) + const datastore = isNode + ? new FsStore(path.join(os.tmpdir(), 'test-keystore-1-' + Date.now())) + : new LevelStore('test-keystore-1', { db: require('level') }) ks = new Keychain(datastore, { passPhrase: passPhrase }) }) diff --git a/test/keychain/keychain-api.spec.js b/test/keychain/keychain-api.spec.js new file mode 100644 index 0000000000..3fc66708c7 --- /dev/null +++ b/test/keychain/keychain-api.spec.js @@ -0,0 +1,394 @@ +/* eslint max-nested-callbacks: ["error", 8] */ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const expect = chai.expect +const fail = expect.fail +chai.use(require('dirty-chai')) +chai.use(require('chai-string')) + +const os = require('os') +const path = require('path') +const { isNode } = require('ipfs-utils/src/env') +const FsStore = require('datastore-fs') +const LevelStore = require('datastore-level') +const Keychain = require('../../src/keychain') +const PeerId = require('peer-id') + +describe('keychain api', () => { + const passPhrase = 'this is not a secure phrase' + const rsaKeyName = 'tajné jméno' + const renamedRsaKeyName = 'ชื่อลับ' + let rsaKeyInfo + let emptyKeystore + let ks + let datastore1, datastore2 + + before(() => { + datastore1 = isNode + ? new FsStore(path.join(os.tmpdir(), 'test-keystore-1-' + Date.now())) + : new LevelStore('test-keystore-1', { db: require('level') }) + datastore2 = isNode + ? 
new FsStore(path.join(os.tmpdir(), 'test-keystore-2-' + Date.now())) + : new LevelStore('test-keystore-2', { db: require('level') }) + + ks = new Keychain(datastore2, { passPhrase: passPhrase }) + emptyKeystore = new Keychain(datastore1, { passPhrase: passPhrase }) + }) + + it('needs a pass phrase to encrypt a key', () => { + expect(() => new Keychain(datastore2)).to.throw() + }) + + it('needs a NIST SP 800-132 non-weak pass phrase', () => { + expect(() => new Keychain(datastore2, { passPhrase: '< 20 character' })).to.throw() + }) + + it('needs a store to persist a key', () => { + expect(() => new Keychain(null, { passPhrase: passPhrase })).to.throw() + }) + + it('has default options', () => { + expect(Keychain.options).to.exist() + }) + + it('needs a supported hashing alorithm', () => { + const ok = new Keychain(datastore2, { passPhrase: passPhrase, dek: { hash: 'sha2-256' } }) + expect(ok).to.exist() + expect(() => new Keychain(datastore2, { passPhrase: passPhrase, dek: { hash: 'my-hash' } })).to.throw() + }) + + it('can generate options', () => { + const options = Keychain.generateOptions() + options.passPhrase = passPhrase + const chain = new Keychain(datastore2, options) + expect(chain).to.exist() + }) + + describe('key name', () => { + it('is a valid filename and non-ASCII', async () => { + const errors = await Promise.all([ + ks.removeKey('../../nasty').then(fail, err => err), + ks.removeKey('').then(fail, err => err), + ks.removeKey(' ').then(fail, err => err), + ks.removeKey(null).then(fail, err => err), + ks.removeKey(undefined).then(fail, err => err) + ]) + + expect(errors).to.have.length(5) + errors.forEach(error => { + expect(error).to.have.property('code', 'ERR_INVALID_KEY_NAME') + }) + }) + }) + + describe('key', () => { + it('can be an RSA key', async () => { + rsaKeyInfo = await ks.createKey(rsaKeyName, 'rsa', 2048) + expect(rsaKeyInfo).to.exist() + expect(rsaKeyInfo).to.have.property('name', rsaKeyName) + expect(rsaKeyInfo).to.have.property('id') + }) + + it('is encrypted PEM encoded PKCS #8', async () => { + const pem = await ks._getPrivateKey(rsaKeyName) + return expect(pem).to.startsWith('-----BEGIN ENCRYPTED PRIVATE KEY-----') + }) + + it('throws if an invalid private key name is given', async () => { + const err = await ks._getPrivateKey(undefined).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') + }) + + it('throws if a private key cant be found', async () => { + const err = await ks._getPrivateKey('not real').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_KEY_NOT_FOUND') + }) + + it('does not overwrite existing key', async () => { + const err = await ks.createKey(rsaKeyName, 'rsa', 2048).then(fail, err => err) + expect(err).to.have.property('code', 'ERR_KEY_ALREADY_EXISTS') + }) + + it('cannot create the "self" key', async () => { + const err = await ks.createKey('self', 'rsa', 2048).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') + }) + + it('should validate name is string', async () => { + const err = await ks.createKey(5, 'rsa', 2048).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') + }) + + it('should validate type is string', async () => { + const err = await ks.createKey('TEST' + Date.now(), null, 2048).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_KEY_TYPE') + }) + + it('should 
validate size is integer', async () => { + const err = await ks.createKey('TEST' + Date.now(), 'rsa', 'string').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_KEY_SIZE') + }) + + describe('implements NIST SP 800-131A', () => { + it('disallows RSA length < 2048', async () => { + const err = await ks.createKey('bad-nist-rsa', 'rsa', 1024).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_KEY_SIZE') + }) + }) + }) + + describe('query', () => { + it('finds all existing keys', async () => { + const keys = await ks.list() + expect(keys).to.exist() + const mykey = keys.find((k) => k.name.normalize() === rsaKeyName.normalize()) + expect(mykey).to.exist() + }) + + it('finds a key by name', async () => { + const key = await ks.findByName(rsaKeyName) + expect(key).to.exist() + expect(key).to.deep.equal(rsaKeyInfo) + }) + + it('finds a key by id', async () => { + const key = await ks.findById(rsaKeyInfo.id) + expect(key).to.exist() + expect(key).to.deep.equal(rsaKeyInfo) + }) + + it('returns the key\'s name and id', async () => { + const keys = await ks.list() + expect(keys).to.exist() + keys.forEach((key) => { + expect(key).to.have.property('name') + expect(key).to.have.property('id') + }) + }) + }) + + describe('CMS protected data', () => { + const plainData = Buffer.from('This is a message from Alice to Bob') + let cms + + it('service is available', () => { + expect(ks).to.have.property('cms') + }) + + it('requires a key', async () => { + const err = await ks.cms.encrypt('no-key', plainData).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_KEY_NOT_FOUND') + }) + + it('requires plain data as a Buffer', async () => { + const err = await ks.cms.encrypt(rsaKeyName, 'plain data').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_PARAMS') + }) + + it('encrypts', async () => { + cms = await ks.cms.encrypt(rsaKeyName, plainData) + expect(cms).to.exist() + expect(cms).to.be.instanceOf(Buffer) + }) + + it('is a PKCS #7 message', async () => { + const err = await ks.cms.decrypt('not CMS').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_PARAMS') + }) + + it('is a PKCS #7 binary message', async () => { + const err = await ks.cms.decrypt(plainData).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_CMS') + }) + + it('cannot be read without the key', async () => { + const err = await emptyKeystore.cms.decrypt(cms).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('missingKeys') + expect(err.missingKeys).to.eql([rsaKeyInfo.id]) + expect(err).to.have.property('code', 'ERR_MISSING_KEYS') + }) + + it('can be read with the key', async () => { + const plain = await ks.cms.decrypt(cms) + expect(plain).to.exist() + expect(plain.toString()).to.equal(plainData.toString()) + }) + }) + + describe('exported key', () => { + let pemKey + + it('requires the password', async () => { + const err = await ks.exportKey(rsaKeyName).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_PASSWORD_REQUIRED') + }) + + it('requires the key name', async () => { + const err = await ks.exportKey(undefined, 'password').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') + }) + + it('is a PKCS #8 encrypted pem', async () => { + 
pemKey = await ks.exportKey(rsaKeyName, 'password') + expect(pemKey).to.startsWith('-----BEGIN ENCRYPTED PRIVATE KEY-----') + }) + + it('can be imported', async () => { + const key = await ks.importKey('imported-key', pemKey, 'password') + expect(key.name).to.equal('imported-key') + expect(key.id).to.equal(rsaKeyInfo.id) + }) + + it('requires the pem', async () => { + const err = await ks.importKey('imported-key', undefined, 'password').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_PEM_REQUIRED') + }) + + it('cannot be imported as an existing key name', async () => { + const err = await ks.importKey(rsaKeyName, pemKey, 'password').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_KEY_ALREADY_EXISTS') + }) + + it('cannot be imported with the wrong password', async () => { + const err = await ks.importKey('a-new-name-for-import', pemKey, 'not the password').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_CANNOT_READ_KEY') + }) + }) + + describe('peer id', () => { + const alicePrivKey = 'CAASpgkwggSiAgEAAoIBAQC2SKo/HMFZeBml1AF3XijzrxrfQXdJzjePBZAbdxqKR1Mc6juRHXij6HXYPjlAk01BhF1S3Ll4Lwi0cAHhggf457sMg55UWyeGKeUv0ucgvCpBwlR5cQ020i0MgzjPWOLWq1rtvSbNcAi2ZEVn6+Q2EcHo3wUvWRtLeKz+DZSZfw2PEDC+DGPJPl7f8g7zl56YymmmzH9liZLNrzg/qidokUv5u1pdGrcpLuPNeTODk0cqKB+OUbuKj9GShYECCEjaybJDl9276oalL9ghBtSeEv20kugatTvYy590wFlJkkvyl+nPxIH0EEYMKK9XRWlu9XYnoSfboiwcv8M3SlsjAgMBAAECggEAZtju/bcKvKFPz0mkHiaJcpycy9STKphorpCT83srBVQi59CdFU6Mj+aL/xt0kCPMVigJw8P3/YCEJ9J+rS8BsoWE+xWUEsJvtXoT7vzPHaAtM3ci1HZd302Mz1+GgS8Epdx+7F5p80XAFLDUnELzOzKftvWGZmWfSeDnslwVONkL/1VAzwKy7Ce6hk4SxRE7l2NE2OklSHOzCGU1f78ZzVYKSnS5Ag9YrGjOAmTOXDbKNKN/qIorAQ1bovzGoCwx3iGIatQKFOxyVCyO1PsJYT7JO+kZbhBWRRE+L7l+ppPER9bdLFxs1t5CrKc078h+wuUr05S1P1JjXk68pk3+kQKBgQDeK8AR11373Mzib6uzpjGzgNRMzdYNuExWjxyxAzz53NAR7zrPHvXvfIqjDScLJ4NcRO2TddhXAfZoOPVH5k4PJHKLBPKuXZpWlookCAyENY7+Pd55S8r+a+MusrMagYNljb5WbVTgN8cgdpim9lbbIFlpN6SZaVjLQL3J8TWH6wKBgQDSChzItkqWX11CNstJ9zJyUE20I7LrpyBJNgG1gtvz3ZMUQCn3PxxHtQzN9n1P0mSSYs+jBKPuoSyYLt1wwe10/lpgL4rkKWU3/m1Myt0tveJ9WcqHh6tzcAbb/fXpUFT/o4SWDimWkPkuCb+8j//2yiXk0a/T2f36zKMuZvujqQKBgC6B7BAQDG2H2B/ijofp12ejJU36nL98gAZyqOfpLJ+FeMz4TlBDQ+phIMhnHXA5UkdDapQ+zA3SrFk+6yGk9Vw4Hf46B+82SvOrSbmnMa+PYqKYIvUzR4gg34rL/7AhwnbEyD5hXq4dHwMNsIDq+l2elPjwm/U9V0gdAl2+r50HAoGALtsKqMvhv8HucAMBPrLikhXP/8um8mMKFMrzfqZ+otxfHzlhI0L08Bo3jQrb0Z7ByNY6M8epOmbCKADsbWcVre/AAY0ZkuSZK/CaOXNX/AhMKmKJh8qAOPRY02LIJRBCpfS4czEdnfUhYV/TYiFNnKRj57PPYZdTzUsxa/yVTmECgYBr7slQEjb5Onn5mZnGDh+72BxLNdgwBkhO0OCdpdISqk0F0Pxby22DFOKXZEpiyI9XYP1C8wPiJsShGm2yEwBPWXnrrZNWczaVuCbXHrZkWQogBDG3HGXNdU4MAWCyiYlyinIBpPpoAJZSzpGLmWbMWh28+RJS6AQX6KHrK1o2uw==' + let alice + + before(async function () { + const encoded = Buffer.from(alicePrivKey, 'base64') + alice = await PeerId.createFromPrivKey(encoded) + }) + + it('private key can be imported', async () => { + const key = await ks.importPeer('alice', alice) + expect(key.name).to.equal('alice') + expect(key.id).to.equal(alice.toB58String()) + }) + + it('private key import requires a valid name', async () => { + const err = await ks.importPeer(undefined, alice).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') + }) + + it('private key import requires the peer', async () => { + const err = await ks.importPeer('alice').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_MISSING_PRIVATE_KEY') + }) + + it('key id exists', 
async () => { + const key = await ks.findById(alice.toB58String()) + expect(key).to.exist() + expect(key).to.have.property('name', 'alice') + expect(key).to.have.property('id', alice.toB58String()) + }) + + it('key name exists', async () => { + const key = await ks.findByName('alice') + expect(key).to.exist() + expect(key).to.have.property('name', 'alice') + expect(key).to.have.property('id', alice.toB58String()) + }) + }) + + describe('rename', () => { + it('requires an existing key name', async () => { + const err = await ks.renameKey('not-there', renamedRsaKeyName).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_NOT_FOUND') + }) + + it('requires a valid new key name', async () => { + const err = await ks.renameKey(rsaKeyName, '..\not-valid').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_NEW_KEY_NAME_INVALID') + }) + + it('does not overwrite existing key', async () => { + const err = await ks.renameKey(rsaKeyName, rsaKeyName).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_KEY_ALREADY_EXISTS') + }) + + it('cannot create the "self" key', async () => { + const err = await ks.renameKey(rsaKeyName, 'self').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_NEW_KEY_NAME_INVALID') + }) + + it('removes the existing key name', async () => { + const key = await ks.renameKey(rsaKeyName, renamedRsaKeyName) + expect(key).to.exist() + expect(key).to.have.property('name', renamedRsaKeyName) + expect(key).to.have.property('id', rsaKeyInfo.id) + // Try to find the changed key + const err = await ks.findByName(rsaKeyName).then(fail, err => err) + expect(err).to.exist() + }) + + it('creates the new key name', async () => { + const key = await ks.findByName(renamedRsaKeyName) + expect(key).to.exist() + expect(key).to.have.property('name', renamedRsaKeyName) + }) + + it('does not change the key ID', async () => { + const key = await ks.findByName(renamedRsaKeyName) + expect(key).to.exist() + expect(key).to.have.property('name', renamedRsaKeyName) + expect(key).to.have.property('id', rsaKeyInfo.id) + }) + + it('throws with invalid key names', async () => { + const err = await ks.findByName(undefined).then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') + }) + }) + + describe('key removal', () => { + it('cannot remove the "self" key', async () => { + const err = await ks.removeKey('self').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') + }) + + it('cannot remove an unknown key', async () => { + const err = await ks.removeKey('not-there').then(fail, err => err) + expect(err).to.exist() + expect(err).to.have.property('code', 'ERR_KEY_NOT_FOUND') + }) + + it('can remove a known key', async () => { + const key = await ks.removeKey(renamedRsaKeyName) + expect(key).to.exist() + expect(key).to.have.property('name', renamedRsaKeyName) + expect(key).to.have.property('id', rsaKeyInfo.id) + }) + }) +}) diff --git a/test/keychain/keychain.spec.js b/test/keychain/keychain.spec.js index 9e40797b02..9b3cfc1adb 100644 --- a/test/keychain/keychain.spec.js +++ b/test/keychain/keychain.spec.js @@ -1,394 +1,38 @@ -/* eslint max-nested-callbacks: ["error", 8] */ -/* eslint-env mocha */ 'use strict' +/* eslint-env mocha */ const chai = require('chai') -const expect = chai.expect -const fail = expect.fail chai.use(require('dirty-chai')) 
-chai.use(require('chai-string')) - -const os = require('os') -const path = require('path') -const { isBrowser } = require('ipfs-utils/src/env') -const FsStore = require('datastore-fs') -const LevelStore = require('datastore-level') -const Keychain = require('../../src/keychain') -const PeerId = require('peer-id') - -describe('keychain', () => { - const passPhrase = 'this is not a secure phrase' - const rsaKeyName = 'tajné jméno' - const renamedRsaKeyName = 'ชื่อลับ' - let rsaKeyInfo - let emptyKeystore - let ks - let datastore1, datastore2 - - before(() => { - datastore1 = isBrowser - ? new LevelStore('test-keystore-1', { db: require('level') }) - : new FsStore(path.join(os.tmpdir(), 'test-keystore-1-' + Date.now())) - datastore2 = isBrowser - ? new LevelStore('test-keystore-2', { db: require('level') }) - : new FsStore(path.join(os.tmpdir(), 'test-keystore-2-' + Date.now())) - - ks = new Keychain(datastore2, { passPhrase: passPhrase }) - emptyKeystore = new Keychain(datastore1, { passPhrase: passPhrase }) - }) - - it('needs a pass phrase to encrypt a key', () => { - expect(() => new Keychain(datastore2)).to.throw() - }) - - it('needs a NIST SP 800-132 non-weak pass phrase', () => { - expect(() => new Keychain(datastore2, { passPhrase: '< 20 character' })).to.throw() - }) - - it('needs a store to persist a key', () => { - expect(() => new Keychain(null, { passPhrase: passPhrase })).to.throw() - }) - - it('has default options', () => { - expect(Keychain.options).to.exist() - }) - - it('needs a supported hashing alorithm', () => { - const ok = new Keychain(datastore2, { passPhrase: passPhrase, dek: { hash: 'sha2-256' } }) - expect(ok).to.exist() - expect(() => new Keychain(datastore2, { passPhrase: passPhrase, dek: { hash: 'my-hash' } })).to.throw() - }) - - it('can generate options', () => { - const options = Keychain.generateOptions() - options.passPhrase = passPhrase - const chain = new Keychain(datastore2, options) - expect(chain).to.exist() - }) - - describe('key name', () => { - it('is a valid filename and non-ASCII', async () => { - const errors = await Promise.all([ - ks.removeKey('../../nasty').then(fail, err => err), - ks.removeKey('').then(fail, err => err), - ks.removeKey(' ').then(fail, err => err), - ks.removeKey(null).then(fail, err => err), - ks.removeKey(undefined).then(fail, err => err) - ]) - - expect(errors).to.have.length(5) - errors.forEach(error => { - expect(error).to.have.property('code', 'ERR_INVALID_KEY_NAME') - }) - }) - }) - - describe('key', () => { - it('can be an RSA key', async () => { - rsaKeyInfo = await ks.createKey(rsaKeyName, 'rsa', 2048) - expect(rsaKeyInfo).to.exist() - expect(rsaKeyInfo).to.have.property('name', rsaKeyName) - expect(rsaKeyInfo).to.have.property('id') - }) - - it('is encrypted PEM encoded PKCS #8', async () => { - const pem = await ks._getPrivateKey(rsaKeyName) - return expect(pem).to.startsWith('-----BEGIN ENCRYPTED PRIVATE KEY-----') - }) +const { expect } = chai - it('throws if an invalid private key name is given', async () => { - const err = await ks._getPrivateKey(undefined).then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') - }) - - it('throws if a private key cant be found', async () => { - const err = await ks._getPrivateKey('not real').then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_KEY_NOT_FOUND') - }) +const peerUtils = require('../utils/creators/peer') - it('does not overwrite existing key', async () => { - const err = 
await ks.createKey(rsaKeyName, 'rsa', 2048).then(fail, err => err) - expect(err).to.have.property('code', 'ERR_KEY_ALREADY_EXISTS') +describe('libp2p.keychain', () => { + it('needs a passphrase to be used, otherwise throws an error', async () => { + const [libp2p] = await peerUtils.createPeer({ + started: false }) - it('cannot create the "self" key', async () => { - const err = await ks.createKey('self', 'rsa', 2048).then(fail, err => err) + try { + await libp2p.keychain.createKey('keyName', 'rsa', 2048) + } catch (err) { expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') - }) - - it('should validate name is string', async () => { - const err = await ks.createKey(5, 'rsa', 2048).then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') - }) - - it('should validate type is string', async () => { - const err = await ks.createKey('TEST' + Date.now(), null, 2048).then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_INVALID_KEY_TYPE') - }) - - it('should validate size is integer', async () => { - const err = await ks.createKey('TEST' + Date.now(), 'rsa', 'string').then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_INVALID_KEY_SIZE') - }) - - describe('implements NIST SP 800-131A', () => { - it('disallows RSA length < 2048', async () => { - const err = await ks.createKey('bad-nist-rsa', 'rsa', 1024).then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_INVALID_KEY_SIZE') - }) - }) - }) - - describe('query', () => { - it('finds all existing keys', async () => { - const keys = await ks.listKeys() - expect(keys).to.exist() - const mykey = keys.find((k) => k.name.normalize() === rsaKeyName.normalize()) - expect(mykey).to.exist() - }) - - it('finds a key by name', async () => { - const key = await ks.findKeyByName(rsaKeyName) - expect(key).to.exist() - expect(key).to.deep.equal(rsaKeyInfo) - }) - - it('finds a key by id', async () => { - const key = await ks.findKeyById(rsaKeyInfo.id) - expect(key).to.exist() - expect(key).to.deep.equal(rsaKeyInfo) - }) - - it('returns the key\'s name and id', async () => { - const keys = await ks.listKeys() - expect(keys).to.exist() - keys.forEach((key) => { - expect(key).to.have.property('name') - expect(key).to.have.property('id') - }) - }) + return + } + throw new Error('should throw an error using the keychain if no passphrase provided') }) - describe('CMS protected data', () => { - const plainData = Buffer.from('This is a message from Alice to Bob') - let cms - - it('service is available', () => { - expect(ks).to.have.property('cms') - }) - - it('requires a key', async () => { - const err = await ks.cms.encrypt('no-key', plainData).then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_KEY_NOT_FOUND') - }) - - it('requires plain data as a Buffer', async () => { - const err = await ks.cms.encrypt(rsaKeyName, 'plain data').then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_INVALID_PARAMS') - }) - - it('encrypts', async () => { - cms = await ks.cms.encrypt(rsaKeyName, plainData) - expect(cms).to.exist() - expect(cms).to.be.instanceOf(Buffer) - }) - - it('is a PKCS #7 message', async () => { - const err = await ks.cms.decrypt('not CMS').then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_INVALID_PARAMS') - }) - - it('is a PKCS #7 binary message', 
async () => { - const err = await ks.cms.decrypt(plainData).then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_INVALID_CMS') - }) - - it('cannot be read without the key', async () => { - const err = await emptyKeystore.cms.decrypt(cms).then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('missingKeys') - expect(err.missingKeys).to.eql([rsaKeyInfo.id]) - expect(err).to.have.property('code', 'ERR_MISSING_KEYS') + it('can be used if a passphrase is provided', async () => { + const [libp2p] = await peerUtils.createPeer({ + started: false, + config: { + keychain: { + pass: '12345678901234567890' + } + } }) - it('can be read with the key', async () => { - const plain = await ks.cms.decrypt(cms) - expect(plain).to.exist() - expect(plain.toString()).to.equal(plainData.toString()) - }) - }) - - describe('exported key', () => { - let pemKey - - it('requires the password', async () => { - const err = await ks.exportKey(rsaKeyName).then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_PASSWORD_REQUIRED') - }) - - it('requires the key name', async () => { - const err = await ks.exportKey(undefined, 'password').then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') - }) - - it('is a PKCS #8 encrypted pem', async () => { - pemKey = await ks.exportKey(rsaKeyName, 'password') - expect(pemKey).to.startsWith('-----BEGIN ENCRYPTED PRIVATE KEY-----') - }) - - it('can be imported', async () => { - const key = await ks.importKey('imported-key', pemKey, 'password') - expect(key.name).to.equal('imported-key') - expect(key.id).to.equal(rsaKeyInfo.id) - }) - - it('requires the pem', async () => { - const err = await ks.importKey('imported-key', undefined, 'password').then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_PEM_REQUIRED') - }) - - it('cannot be imported as an existing key name', async () => { - const err = await ks.importKey(rsaKeyName, pemKey, 'password').then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_KEY_ALREADY_EXISTS') - }) - - it('cannot be imported with the wrong password', async () => { - const err = await ks.importKey('a-new-name-for-import', pemKey, 'not the password').then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_CANNOT_READ_KEY') - }) - }) - - describe('peer id', () => { - const alicePrivKey = 
'CAASpgkwggSiAgEAAoIBAQC2SKo/HMFZeBml1AF3XijzrxrfQXdJzjePBZAbdxqKR1Mc6juRHXij6HXYPjlAk01BhF1S3Ll4Lwi0cAHhggf457sMg55UWyeGKeUv0ucgvCpBwlR5cQ020i0MgzjPWOLWq1rtvSbNcAi2ZEVn6+Q2EcHo3wUvWRtLeKz+DZSZfw2PEDC+DGPJPl7f8g7zl56YymmmzH9liZLNrzg/qidokUv5u1pdGrcpLuPNeTODk0cqKB+OUbuKj9GShYECCEjaybJDl9276oalL9ghBtSeEv20kugatTvYy590wFlJkkvyl+nPxIH0EEYMKK9XRWlu9XYnoSfboiwcv8M3SlsjAgMBAAECggEAZtju/bcKvKFPz0mkHiaJcpycy9STKphorpCT83srBVQi59CdFU6Mj+aL/xt0kCPMVigJw8P3/YCEJ9J+rS8BsoWE+xWUEsJvtXoT7vzPHaAtM3ci1HZd302Mz1+GgS8Epdx+7F5p80XAFLDUnELzOzKftvWGZmWfSeDnslwVONkL/1VAzwKy7Ce6hk4SxRE7l2NE2OklSHOzCGU1f78ZzVYKSnS5Ag9YrGjOAmTOXDbKNKN/qIorAQ1bovzGoCwx3iGIatQKFOxyVCyO1PsJYT7JO+kZbhBWRRE+L7l+ppPER9bdLFxs1t5CrKc078h+wuUr05S1P1JjXk68pk3+kQKBgQDeK8AR11373Mzib6uzpjGzgNRMzdYNuExWjxyxAzz53NAR7zrPHvXvfIqjDScLJ4NcRO2TddhXAfZoOPVH5k4PJHKLBPKuXZpWlookCAyENY7+Pd55S8r+a+MusrMagYNljb5WbVTgN8cgdpim9lbbIFlpN6SZaVjLQL3J8TWH6wKBgQDSChzItkqWX11CNstJ9zJyUE20I7LrpyBJNgG1gtvz3ZMUQCn3PxxHtQzN9n1P0mSSYs+jBKPuoSyYLt1wwe10/lpgL4rkKWU3/m1Myt0tveJ9WcqHh6tzcAbb/fXpUFT/o4SWDimWkPkuCb+8j//2yiXk0a/T2f36zKMuZvujqQKBgC6B7BAQDG2H2B/ijofp12ejJU36nL98gAZyqOfpLJ+FeMz4TlBDQ+phIMhnHXA5UkdDapQ+zA3SrFk+6yGk9Vw4Hf46B+82SvOrSbmnMa+PYqKYIvUzR4gg34rL/7AhwnbEyD5hXq4dHwMNsIDq+l2elPjwm/U9V0gdAl2+r50HAoGALtsKqMvhv8HucAMBPrLikhXP/8um8mMKFMrzfqZ+otxfHzlhI0L08Bo3jQrb0Z7ByNY6M8epOmbCKADsbWcVre/AAY0ZkuSZK/CaOXNX/AhMKmKJh8qAOPRY02LIJRBCpfS4czEdnfUhYV/TYiFNnKRj57PPYZdTzUsxa/yVTmECgYBr7slQEjb5Onn5mZnGDh+72BxLNdgwBkhO0OCdpdISqk0F0Pxby22DFOKXZEpiyI9XYP1C8wPiJsShGm2yEwBPWXnrrZNWczaVuCbXHrZkWQogBDG3HGXNdU4MAWCyiYlyinIBpPpoAJZSzpGLmWbMWh28+RJS6AQX6KHrK1o2uw==' - let alice - - before(async function () { - const encoded = Buffer.from(alicePrivKey, 'base64') - alice = await PeerId.createFromPrivKey(encoded) - }) - - it('private key can be imported', async () => { - const key = await ks.importPeer('alice', alice) - expect(key.name).to.equal('alice') - expect(key.id).to.equal(alice.toB58String()) - }) - - it('private key import requires a valid name', async () => { - const err = await ks.importPeer(undefined, alice).then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') - }) - - it('private key import requires the peer', async () => { - const err = await ks.importPeer('alice').then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_MISSING_PRIVATE_KEY') - }) - - it('key id exists', async () => { - const key = await ks.findKeyById(alice.toB58String()) - expect(key).to.exist() - expect(key).to.have.property('name', 'alice') - expect(key).to.have.property('id', alice.toB58String()) - }) - - it('key name exists', async () => { - const key = await ks.findKeyByName('alice') - expect(key).to.exist() - expect(key).to.have.property('name', 'alice') - expect(key).to.have.property('id', alice.toB58String()) - }) - }) - - describe('rename', () => { - it('requires an existing key name', async () => { - const err = await ks.renameKey('not-there', renamedRsaKeyName).then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_NOT_FOUND') - }) - - it('requires a valid new key name', async () => { - const err = await ks.renameKey(rsaKeyName, '..\not-valid').then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_NEW_KEY_NAME_INVALID') - }) - - it('does not overwrite existing key', async () => { - const err = await ks.renameKey(rsaKeyName, rsaKeyName).then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('code', 
'ERR_KEY_ALREADY_EXISTS') - }) - - it('cannot create the "self" key', async () => { - const err = await ks.renameKey(rsaKeyName, 'self').then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_NEW_KEY_NAME_INVALID') - }) - - it('removes the existing key name', async () => { - const key = await ks.renameKey(rsaKeyName, renamedRsaKeyName) - expect(key).to.exist() - expect(key).to.have.property('name', renamedRsaKeyName) - expect(key).to.have.property('id', rsaKeyInfo.id) - // Try to find the changed key - const err = await ks.findKeyByName(rsaKeyName).then(fail, err => err) - expect(err).to.exist() - }) - - it('creates the new key name', async () => { - const key = await ks.findKeyByName(renamedRsaKeyName) - expect(key).to.exist() - expect(key).to.have.property('name', renamedRsaKeyName) - }) - - it('does not change the key ID', async () => { - const key = await ks.findKeyByName(renamedRsaKeyName) - expect(key).to.exist() - expect(key).to.have.property('name', renamedRsaKeyName) - expect(key).to.have.property('id', rsaKeyInfo.id) - }) - - it('throws with invalid key names', async () => { - const err = await ks.findKeyByName(undefined).then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') - }) - }) - - describe('key removal', () => { - it('cannot remove the "self" key', async () => { - const err = await ks.removeKey('self').then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_INVALID_KEY_NAME') - }) - - it('cannot remove an unknown key', async () => { - const err = await ks.removeKey('not-there').then(fail, err => err) - expect(err).to.exist() - expect(err).to.have.property('code', 'ERR_KEY_NOT_FOUND') - }) - - it('can remove a known key', async () => { - const key = await ks.removeKey(renamedRsaKeyName) - expect(key).to.exist() - expect(key).to.have.property('name', renamedRsaKeyName) - expect(key).to.have.property('id', rsaKeyInfo.id) - }) + const kInfo = await libp2p.keychain.createKey('keyName', 'rsa', 2048) + expect(kInfo).to.exist() }) })
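Taken together, these changes let a node encrypt and decrypt CMS data entirely through `libp2p.keychain`. A rough end-to-end sketch, not part of the test suite: module choices mirror the configuration example in doc/CONFIGURATION.md, the key name and payload are illustrative, and the implicit in-memory datastore is only suitable for experiments.

```js
'use strict'

const Libp2p = require('libp2p')
const TCP = require('libp2p-tcp')
const MPLEX = require('libp2p-mplex')
const SECIO = require('libp2p-secio')

async function main () {
  // With a `pass` but no datastore, the keychain falls back to an in-memory datastore.
  const libp2p = await Libp2p.create({
    modules: { transport: [TCP], streamMuxer: [MPLEX], connEncryption: [SECIO] },
    keychain: { pass: '12345678901234567890' } // must be at least 20 characters
  })

  // Create an RSA key and use it for a CMS encrypt/decrypt round trip.
  await libp2p.keychain.createKey('cms-key', 'rsa', 2048)
  const enc = await libp2p.keychain.cms.encrypt('cms-key', Buffer.from('hello'))
  const plain = await libp2p.keychain.cms.decrypt(enc)
  console.log(plain.toString()) // 'hello'
}

main()
```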