diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index bbf97224..abdb0987 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -10,25 +10,37 @@ stages:
- teardown
- publish
-lint:
- image: node:8
- stage: lint
+##############################################################
+# #
+# Jobs and commands templates #
+# #
+##############################################################
+.install_unittest_packages_cmd: &install_unittest_packages_cmd
+- npm run install-test
+
+.run_unittest_cmd: &run_unittest_cmd
+- npm run test-only
+
+.job_definition: &job_definition
tags:
- docker-executor
- script:
- - npm run install-test
- - npm run lint
-# BIG-IP 13.x and BIG-IP 14.0, unittests only (without coverage check)
-test_node4:
- image: node:4
+.test_job_definition: &test_job_definition
+ extends:
+ - .job_definition
stage: test
+
+.harness_deployment_definition: &harness_deployment_definition
+ image: ${CICD_CONTAINER_DEPLOY}
tags:
- - docker-executor
+ - cm-official-docker-executor
+
+.run_unittest:
+ extends:
+ - .test_job_definition
script:
- - npm run install-test
- - npm install mocha@5.2.0
- - npm run test-only
+ - *install_unittest_packages_cmd
+ - *run_unittest_cmd
artifacts:
name: ${CI_COMMIT_REF_NAME}_unittests_artifacts
paths:
@@ -36,75 +48,74 @@ test_node4:
when: on_failure
expire_in: 3 days
+##############################################################
+# #
+# Jobs #
+# #
+##############################################################
+
+lint:
+ extends:
+ - .test_job_definition
+ image: node:8
+ stage: lint
+ script:
+ - *install_unittest_packages_cmd
+ - npm run lint
+
+# BIG-IP 13.x and BIG-IP 14.0, unittests only (without coverage check)
+test_node4:
+ extends:
+ - .run_unittest
+ image: node:4
+
# just in case, unittests only (without coverage check)
test_node6:
+ extends:
+ - .run_unittest
image: node:6
- stage: test
- tags:
- - docker-executor
- script:
- - npm run install-test
- - npm run test-only
- artifacts:
- name: ${CI_COMMIT_REF_NAME}_unittests_artifacts
- paths:
- - test/artifacts
- when: on_failure
- expire_in: 3 days
# BIG-IP 14.1+, unittests only (without coverage check)
test_node8:
- stage: test
- tags:
- - docker-executor
- script:
- - npm run install-test
- - npm run test-only
- artifacts:
- name: ${CI_COMMIT_REF_NAME}_unittests_artifacts
- paths:
- - test/artifacts
- when: on_failure
- expire_in: 3 days
+ extends:
+ - .run_unittest
+ image: node:8
# mostly for containers, unittests only (without coverage check)
test_node_latest:
+ extends:
+ - .run_unittest
image: node:latest
- stage: test
- tags:
- - docker-executor
- script:
- - npm run install-test
- - npm run test-only
- artifacts:
- name: ${CI_COMMIT_REF_NAME}_unittests_artifacts
- paths:
- - test/artifacts
- when: on_failure
- expire_in: 3 days
-# run tests and check code coverage
-coverage:
- stage: test
+# packages audit
+npm_audit:
+ extends:
+ - .test_job_definition
+ allow_failure: true
script:
# install jq
- apt-get update
- apt-get install -y jq
# install node modules
- - npm run install-test
+ - *install_unittest_packages_cmd
# npm audit - install includes audit, but perform specific check and fail if needed
- - audit_report=$(npm audit --json)
- - echo $audit_report
+ - audit_report=$(npm audit --json) || echo ""
+ - echo "$audit_report"
- actions=$(echo $audit_report | jq .actions | jq length)
- if [ $actions -ne 0 ]; then echo 'ERROR! vulnerabilities exist'; exit 1; fi
- # unit tests
+
+# run tests and check code coverage
+coverage:
+ extends:
+ - .test_job_definition
+ script:
+ - *install_unittest_packages_cmd
+ # run tests with coverage report
- npm test
artifacts:
name: ${CI_COMMIT_REF_NAME}_unittests_coverage
paths:
- coverage
- tags:
- - cm-official-docker-executor
build_rpm:
image: f5devcentral/containthedocs:rpmbuild
@@ -145,23 +156,10 @@ build_docs:
- docs/_build/html
expire_in: 1 month
-# for this job following variables should be defined:
-# CICD_AUTH_OS_USERNAME - VIO user
-# CICD_AUTH_OS_PASSWORD - VIO password
-# CICD_AUTH_OS_PROJECT - VIO project
-# or
-# CICD_AUTH_OS_TOKEN - VIO auth token
-# CICD_AUTH_OS_PROJECT - VIO project
-# Also, variable to *enable* device pipeline should exist
-# REQ_DEVICE_PIPELINE - boolean
deploy_env:
- image: ${CICD_CONTAINER_DEPLOY}
+ extends:
+ - .harness_deployment_definition
stage: deploy
- tags:
- - cm-official-docker-executor
- variables:
- PROJECT_DECLARATION: ${CI_PROJECT_DIR}/test/functional/deployment/declaration.yml
- CUSTOM_DECLARATION: "yes"
artifacts:
name: ${CI_COMMIT_REF_NAME}_bigip.harness_info
paths:
@@ -171,24 +169,10 @@ deploy_env:
variables:
- $REQ_DEVICE_PIPELINE == "true"
script:
- - export PROJECT_NAME=$([ "${CICD_PROJECT_NAME}" == "" ] && echo "test_functional_harness" || echo "${CICD_PROJECT_NAME}")
- - export PROJECT_DIR="/root/deploy-projects/${PROJECT_NAME}"
- - declaration=$(sed "s/_DEPLOYMENT_NAME_/${PROJECT_NAME}/g" "${PROJECT_DECLARATION}")
- - echo "$declaration" > "${PROJECT_DECLARATION}"
- - cat "${PROJECT_DECLARATION}"
- - cd /root/cicd-bigip-deploy && make configure &&
- make printvars &&
- make setup && ls -als ${PROJECT_DIR} &&
- cp ${PROJECT_DIR}/harness_facts_flat.json ${CI_PROJECT_DIR}/harness_facts_flat.json
+ - $SHELL ./scripts/functional-testing/setup.sh
test_functional:
stage: functional test
- script:
- - export TEST_HARNESS_FILE=${CI_PROJECT_DIR}/harness_facts_flat.json
- # really only need dev dependencies
- - npm run install-test
- - ls ./dist -ls
- - npm run test-functional
# troubleshooting functional test failures typically requires looking at logs, one of which is
# the restnoded log that is captured by the functional tests. This saves off the folder
# containing that log as an artifact to speed up the troubleshooting process
@@ -204,29 +188,24 @@ test_functional:
variables:
# enable this job
- $RUN_FUNCTIONAL_TESTS == "true"
+ script:
+ - export TEST_HARNESS_FILE=${CI_PROJECT_DIR}/harness_facts_flat.json
+ - ls ./dist -ls
+ # really only need dev dependencies
+ - *install_unittest_packages_cmd
+ - npm install mocha@7.1.0
+ - npm run test-functional
-# should be executed manually to remove the harness
teardown_env:
- image: ${CICD_CONTAINER_DEPLOY}
+ extends:
+ - .harness_deployment_definition
stage: teardown
- tags:
- - cm-official-docker-executor
- variables:
- PROJECT_DECLARATION: ${CI_PROJECT_DIR}/test/functional/deployment/declaration.yml
- CUSTOM_DECLARATION: "yes"
- script:
- - export PROJECT_NAME=$([ "${CICD_PROJECT_NAME}" == "" ] && echo "test_functional_harness" || echo "${CICD_PROJECT_NAME}")
- - export PROJECT_DIR="/root/deploy-projects/${PROJECT_NAME}"
- - declaration=$(sed "s/_DEPLOYMENT_NAME_/${PROJECT_NAME}/g" "${PROJECT_DECLARATION}")
- - echo "$declaration" > "${PROJECT_DECLARATION}"
- - cat "${PROJECT_DECLARATION}"
- - cd /root/cicd-bigip-deploy && make configure &&
- make printvars &&
- make teardown
when: manual
only:
variables:
- $REQ_DEVICE_PIPELINE == "true"
+ script:
+ - $SHELL ./scripts/functional-testing/teardown.sh
# Publish to internal artifactory
# Note: Will publish when new tags are pushed and use the current build in dist directory
@@ -289,6 +268,7 @@ pages:
only:
# only update on designated, stable branch
- develop
+ - doc-release-branch
# Publish docs to clouddocs.f5networks.net
publish_docs_to_staging:
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b959b0de..6e225e12 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,25 @@
# Changelog
Changes to this project are documented in this file. More detail and links can be found in the Telemetry Streaming [Document Revision History](https://clouddocs.f5.com/products/extensions/f5-telemetry-streaming/latest/revision-history.html).
+## 1.10.0
+### Added
+- AUTOTOOL-1111: Enable configurable polling with Telemetry_Endpoints (BIG-IP paths) and multiple system poller support
+- AUTOTOOL-1148: Allow 'OR' logic by adding ifAnyMatch functionality.
+- AUTOTOOL-853: Support F5 systems (ex: Viprion) that have multiple hosts
+### Fixed
+- AUTOTOOL-1051: Event Listener unable to classify AFM DoS event
+- AUTOTOOL-1037: Splunk legacy tmstats - include last_cycle_count
+- AUTOTOOL-1019: Splunk legacy tmstats - add tenant and application data
+- AUTOTOOL-1128: Declarations with large secrets may timeout
+- AUTOTOOL-1154: Passphrases should be obfuscated in consumer trace files
+- AUTOTOOL-1147: Add 'profiles' data (profiles attached to Virtual Server) to 'virtualServers'
+- AUTOTOOL-896: [GitHub #26](https://github.com/F5Networks/f5-telemetry-streaming/pull/26): Use baseMac instead of hostname to fetch CM device
+- AUTOTOOL-1160: cipherText validation when protected by SecureVault
+- AUTOTOOL-1239: Caching data about the host device to speed up declaration processing
+### Changed
+- AUTOTOOL-1062: Update NPM packages
+### Removed
+
## 1.9.0
### Added
- AUTOTOOL-725 and AUTOTOOL-755: Add support for GSLB WideIP and Pools Config and Stats
diff --git a/SUPPORT.md b/SUPPORT.md
index 4649083a..4a9481dd 100644
--- a/SUPPORT.md
+++ b/SUPPORT.md
@@ -19,6 +19,7 @@ Currently supported versions:
|------------------|---------------|---------------------|-----------------|
| TS 1.8.0 | Feature | 03-Dec-2019 | 03-Mar-2020 |
| TS 1.9.0 | Feature | 28-Jan-2020 | 28-Apr-2020 |
+| TS 1.10.0 | Feature | 10-Mar-2020 | 10-Jun-2020 |
Versions no longer supported:
diff --git a/contributing/README.md b/contributing/README.md
index a41fe37f..6741853f 100644
--- a/contributing/README.md
+++ b/contributing/README.md
@@ -108,7 +108,7 @@ How does the project handle a typical `POST` request?
"trace": false,
"format": "default"
},
- "schemaVersion": "1.9.0"
+ "schemaVersion": "1.10.0"
}
}
```
@@ -181,7 +181,7 @@ Collect the raw data from the device by adding a new endpoint to the paths confi
```javascript
{
- "endpoint": "/mgmt/tm/sys/global-settings"
+ "path": "/mgmt/tm/sys/global-settings"
}
```
@@ -189,11 +189,11 @@ Collect the raw data from the device by adding a new endpoint to the paths confi
```javascript
{
- "endpoint": "/mgmt/tm/sys/someEndpoint", // REST endpoint
+ "path": "/mgmt/tm/sys/someEndpoint", // REST endpoint
"includeStats": true, // Certain data is only available via /mgmt/tm/sys/someEndpoint as opposed to /mgmt/tm/sys/someEndpoint/stats, this property accomodates for this by making call to /stats (for each item) and adding that data to the original object
"expandReferences": { "membersReference": { "endpointSuffix": "/stats" } }, // Certain data requires getting a list of objects and then in each object expanding/following references to a child object. 'membersReference' is the name of that key (currently looking under 'items' in the data returned) and will result in self link data being retrived and 'membersReference' key being replaced with that data. If 'endpointSuffix' is supplied, a suffix is added to each self link prior to retrieval, otherwise, the value of self link as is will be used. In cases like gslb where both config and stats are needed, both the `link` and `link/stats` need to be fetched, hence, the resulting config is "expandReferences": { "membersReference": { "includeStats": true } }, which is equivalent to "expandReferences": { "membersReference": { "endpointSuffix": "", "includeStats": true } }. TODO: revisit keywords/ naming here to consolidate and avoid confusion
"endpointFields": [ "name", "fullPath", "selfLink", "ipProtocol", "mask" ], // Will collect only these fields from the endoint. Useful when using includeStats and the same property exists in both endpoints. Also can be used instead of a large exclude/include statement in properties.json
- "body": "{ \"command\": \"run\", \"utilCmdArgs\": \"-c \\\"/bin/df -P | /usr/bin/tr -s ' ' ','\\\"\" }", // Certain information may require using POST instead of GET and require an HTTP body, if body is defined that gets used along with a POST
+ "body": "{ \"command\": \"run\", \"utilCmdArgs\": \"-c \\\"/bin/df -P | /usr/bin/tr -s ' ' ','\\\"\" }", // Certain information may require using POST instead of GET and require an HTTP body, if body is defined that gets used along with a POST. Body can be either string or object
"name": "someStatRef", // Alternate name to reference in properties.json, default is to use the endpoint
"ignoreCached": true // Invalidate cached response of previous request to endpoint
}
diff --git a/contributing/process_release.md b/contributing/process_release.md
index d642f9ae..131e8ea3 100644
--- a/contributing/process_release.md
+++ b/contributing/process_release.md
@@ -23,7 +23,7 @@
* [package.json](package.json)
* [package-lock.json](package-lock.json)
* [project.spec](project.spec) (not required starting from 1.5)
- * [src/lib/constants.js](src/lib/constants.js)
+ * [src/lib/constants.js](src/lib/constants.js) (not required starting from 1.10)
* [src/schema/latest/base_schema.json](src/schema/latest/base_schema.json)
* [contributing/README.md](contributing/README.md) (example of response, optional)
* [docs/conf.py](docs/conf.py)
@@ -42,6 +42,8 @@
* 1.4.0 - 8.6 MB
* 1.7.0 - 8.6 MB
* 1.8.0 - 9.5 MB
+ * 1.9.0 - 9.5 MB
+ * 1.10.0 - 9.5 MB
* Install build to BIG-IP, navigate to folder `/var/config/rest/iapps/f5-telemetry/` and check following:
* Run `du -sh` and check that folder's size (shouldn't be much greater than previous versions):
* 1.4.0 - 65 MB
@@ -49,6 +51,8 @@
* 1.6.0 - 66 MB
* 1.7.0 - 66 MB
* 1.8.0 - 73 MB
+ * 1.9.0 - 73 MB
+ * 1.10.0 - 76 MB
* Check `nodejs/node_modules` folder - if you see `eslint`, `mocha` or something else from [package.json](package.json) `devDependencies` section - something wrong with build process. Probably some `npm` flags are work as not expected and it MUST BE FIXED before publishing.
* Ensure that all tests (unit tests and functional tests passed)
* Create pre-release tag and push it to GitLab:
diff --git a/docs/conf.py b/docs/conf.py
index 8f8317dc..08084c91 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -78,7 +78,7 @@
# The short X.Y version.
version = u''
# The full version, including alpha/beta/rc tags.
-release = u'1.9.0'
+release = u'1.10.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/docs/custom-endpoints.rst b/docs/custom-endpoints.rst
new file mode 100644
index 00000000..9e317e83
--- /dev/null
+++ b/docs/custom-endpoints.rst
@@ -0,0 +1,134 @@
+Appendix B: Configuring Custom Endpoints
+========================================
+
+.. WARNING:: Configuring custom Endpoints and multiple System poller support is currently an EXPERIMENTAL feature, and the associated API could change based on testing and user feedback.
+
+.. NOTE:: Custom endpoints are currently for BIG-IP only.
+
+Telemetry Streaming v1.10 allows you to define a list of named endpoints with paths in a new **Telemetry_Endpoints** class, and includes the ability to define multiple system pollers that can fetch specific custom endpoint(s).
+
+
+Using the Telemetry_Endpoints class
+-----------------------------------
+The Telemetry_Endpoints class is where you define your endpoints and their paths.
+
+
+
++--------------------+------------+---------------------------------------------------------------------------------------------------------+
+| Parameter | Required? | Description/Notes |
++====================+============+=========================================================================================================+
+| class | Yes | Telemetry_Endpoints |
++--------------------+------------+---------------------------------------------------------------------------------------------------------+
+| basePath | No | Optional base path value to prepend to each individual endpoint path you specify in "items" |
++--------------------+------------+---------------------------------------------------------------------------------------------------------+
+| enable | No | Whether you want to enable this class. The default is **true**. |
++--------------------+------------+---------------------------------------------------------------------------------------------------------+
+| items | Yes | Object with each property an endpoint with their own properties. |
++--------------------+------------+---------------------------------------------------------------------------------------------------------+
+| \- name | No | Optional name for the item |
++--------------------+------------+---------------------------------------------------------------------------------------------------------+
+| \- path | Yes | Path to query data from |
++--------------------+------------+---------------------------------------------------------------------------------------------------------+
+
+
+For example, your declaration could include the following snippet, which contains endpoints for profiles, and for total connections for a virtual:
+
+.. code-block:: json
+
+ {
+ "Endpoints_Profiles": {
+ "class": "Telemetry_Endpoints",
+ "basePath": "/mgmt/tm/ltm/profile",
+ "items": {
+ "radiusProfiles": {
+ "name": "radiusProfiles",
+ "path": "radius/stats"
+ },
+ "ipOtherProfiles": {
+ "name": "ipOtherProfiles",
+ "path": "ipother/stats"
+ }
+ }
+ },
+ "Endpoints_Misc": {
+ "class": "Telemetry_Endpoints",
+ "items": {
+ "clientside.totConns": {
+ "name": "virtualTotConns",
+ "path": "/mgmt/ltm/virtual/stats?$select=clientside.totConns"
+ },
+ "virtualAddress": {
+ "path": "/mgmt/tm/ltm/virtual-address/stats"
+ }
+ }
+ }
+ }
+
+|
+
+Creating System Pollers specific to the custom endpoint
+-------------------------------------------------------
+Because you might want to specify different polling intervals for the custom endpoints, v1.10.0 also enables the ability to create a system poller specific to an endpoint or array of endpoints. To do this, you use the new **endpointList** property in your system poller definition.
+
+EndpointList is simply a list of endpoints to use in data collection, and can include the following types:
+
+* **Array** |br| When using an array, the item in the array must be one of the following: |br| |br|
+
+ 1. Name of the Telemetry_Endpoints object (for example, ``Endpoints_Profiles``)
+ 2. Name of Telemetry_Endpoints object and the endpoint object key (``Endpoints_Profiles/radiusProfiles``)
+ 3. A Telemetry_Endpoint (name is required). For example:
+
+ .. code-block:: json
+
+ {
+ "path": "mgmt/tm/net/vlan/stats",
+ "name": "requiredWhenInline"
+ }
+
+ 4. A Telemetry_Endpoints definition
+
+* **String** |br| The name of the Telemetry_Endpoints object
+
+* **Object** An object that conforms to the definition of the Telemetry_Endpoints class.
+
+The following is an example of the system pollers, which correspond to the preceding Telemetry_Endpoints example:
+
+.. code-block:: json
+
+ {
+ "Custom_System_Poller1": {
+ "class": "Telemetry_System_Poller",
+ "interval": 60,
+ "enable": false,
+ "endpointList": "Endpoints_Profiles",
+ "trace": true
+ },
+ "Custom_System_Poller2": {
+ "class": "Telemetry_System_Poller",
+ "interval": 720,
+ "enable": true,
+ "endpointList": [
+ "Endpoints_Misc/clientside.totConns",
+ {
+ "path": "mgmt/tm/net/vlan/stats",
+ "name": "requiredWhenInline"
+ }
+ ]
+ }
+ }
+
+|
+
+
+Example declaration for using custom Endpoints with specific pollers
+--------------------------------------------------------------------
+The following example contains a complete example declaration for Telemetry Streaming, which includes the snippets in the examples above.
+
+.. literalinclude:: ../examples/declarations/system_custom_endpoints.json
+ :language: json
+
+
+
+.. |br| raw:: html
+
+
\ No newline at end of file
diff --git a/docs/data-modification.rst b/docs/data-modification.rst
index ea2ac5bf..fc4f2eb9 100644
--- a/docs/data-modification.rst
+++ b/docs/data-modification.rst
@@ -215,6 +215,53 @@ Example 2:
As result of the actions chain analysis, the Telemetry System will fetch **virtualServers** only and not **pools** (and not anything else) because only **virtualServers** should be included in the result's output.
+|
+
+.. _valuebased:
+
+Value-based matching
+--------------------
+.. sidebar:: :fonticon:`fa fa-info-circle fa-lg` Version Notice:
+
+ Support for value-based matching is available in TS v1.10.0 and later
+
+Telemetry Streaming v1.10 adds the **ifAnyMatch** functionality to the existing value-based matching logic. Value-based matching means that TS can filter based on the value of a field instead of just the presence of the field. You can provide multiple values, and the *Action* (**includeData, excludeData or setTag**, described in detail in the section starting with :ref:`include`) is triggered if any of the blocks in the array evaluate to true.
+
+The following example snippet uses the **includeData** action, so if any of the virtual servers in the **test** tenant are either enabled or disabled (and have a state of **available**), then *only* the virtualServer data is included. And because it uses **includeData**, the action must evaluate to true to occur, so if none of the virtualServers have a state of available, then ALL data is included.
+
+.. code-block:: bash
+
+ "actions": [
+ {
+ "includeData": {},
+ "ifAnyMatch": [
+ {
+ "virtualServers": {
+ "/test/*": {
+ "enabledState": "enabled",
+ "availabilityState": "available"
+ }
+ }
+ },
+ {
+ "virtualServers": {
+ "/test/*": {
+ "enabledState": "disabled",
+ "availabilityState": "available"
+ }
+ }
+ }
+ ],
+ "locations": {
+ "virtualServers": {
+ ".*": true
+ }
+ }
+ },
+
+
+For a complete declaration with value-based matching, see :ref:`value`.
+
|
|
diff --git a/docs/declarations.rst b/docs/declarations.rst
index f5d27110..16d49762 100644
--- a/docs/declarations.rst
+++ b/docs/declarations.rst
@@ -61,7 +61,6 @@ Example 4: iHealth Poller
|
-
.. _referencedpollers:
Example 5: Referenced Pollers
@@ -76,3 +75,37 @@ Example 5: Referenced Pollers
:ref:`Back to top`
|
+
+.. _customendpoint:
+
+Example 6: Custom Endpoints
+---------------------------
+.. IMPORTANT:: Configuring custom endpoints and multiple system pollers specific to those endpoints is currently EXPERIMENTAL and is available in TS v1.10.0 and later. See :doc:`custom-endpoints` for more information on this feature.
+
+|
+
+.. literalinclude:: ../examples/declarations/system_custom_endpoints.json
+ :language: json
+
+
+
+
+:ref:`Back to top`
+
+|
+
+.. _value:
+
+Example 7: Value-based matching
+-------------------------------
+.. IMPORTANT:: Value-based matching is available in TS v1.10.0 and later. See :ref:`valuebased` for more information on this feature.
+
+.. literalinclude:: ../examples/declarations/action_matching.json
+ :language: json
+
+
+
+
+:ref:`Back to top`
+
+|
\ No newline at end of file
diff --git a/docs/faq.rst b/docs/faq.rst
index 80712bb5..7ba97bb0 100644
--- a/docs/faq.rst
+++ b/docs/faq.rst
@@ -106,6 +106,23 @@ Telemetry Streaming does not currently enforce validation of the data that an ev
For complete information and examples, see :ref:`char-encoding`.
+|
+
+.. _contract:
+
+**What is F5's Automation Toolchain API Contract?**
+
+The API Contract for the F5 Automation Toolchain (Telemetry Streaming, AS3, and Declarative Onboarding) is our assurance that we will not make arbitrary breaking changes to our API. We take this commitment seriously. We semantically version our declarative API schemas ("xx.yy.zz") and do not make breaking changes within a minor ("yy") or patch ("zz") release. For example, early declarations using AS3 schema "3.0.0" are accepted by all subsequent minor releases including "3.16.0."
+
+As of January 2020, no breaking changes have been made to AS3, Declarative Onboarding, or Telemetry Streaming since inception. None are anticipated at this time. A breaking change, if any, will be noted by a change to the major release number ("xx"). For example, the AS3 schema version would become "4.0.0."
+
+|
+
+.. _viprion:
+
+**Can I use Telemetry Streaming on F5 devices with multiple hosts, such as the Viprion platform?**
+
+Beginning with TS v1.10.0, you can use Telemetry Streaming on F5 devices with multiple hosts, such as the Viprion platform and vCMP systems. In versions prior to v1.10, devices with multiple hosts were not supported.
.. |intro| raw:: html
diff --git a/docs/index.rst b/docs/index.rst
index 9f00061c..892d7bfc 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -58,6 +58,7 @@ Previous buttons to explore the documentation.
troubleshooting
revision-history
schema-reference
+ custom-endpoints
.. |video| raw:: html
diff --git a/docs/output-example.rst b/docs/output-example.rst
index 04f4c43a..a0d4017e 100644
--- a/docs/output-example.rst
+++ b/docs/output-example.rst
@@ -9,6 +9,9 @@ Use this page to see the type of information that Telemetry Streaming collects.
System Information
------------------
+The following shows the system information that Telemetry Streaming collects.
+
+.. NOTE:: For some of the output to appear, you must have the applicable BIG-IP module licensed and provisioned (for example, you must have BIG-IP DNS provisioned to get GSLB wide IP and Pool information).
.. literalinclude:: ../examples/output/system_poller/output.json
:language: json
diff --git a/docs/revision-history.rst b/docs/revision-history.rst
index 9f3b84ee..c598746c 100644
--- a/docs/revision-history.rst
+++ b/docs/revision-history.rst
@@ -11,8 +11,12 @@ Document Revision History
- Description
- Date
+ * - 1.10.0
+ - Updated the documentation for Telemetry Streaming v1.10.0. This release contains the following changes: |br| * Added a feature (currently EXPERIMENTAL) for configuring custom endpoints (see :doc:`custom-endpoints`) |br| * Added **ifAnyMatch** functionality to the existing value-based matching logic (see :ref:`valuebased`) |br| * Added support for F5 devices with multiple hosts (see the :ref:`FAQ`) |br| |br| Issues Resolved: |br| * Event Listener unable to classify AFM DoS event |br| * Splunk legacy tmstats - include last_cycle_count |br| * Splunk legacy tmstats - add tenant and application data |br| * Declarations with large secrets may timeout |br| * Passphrases should be obfuscated in consumer trace files |br| * Add 'profiles' data (profiles attached to Virtual Server) to 'virtualServers' |br| * Use baseMac instead of hostname to fetch CM device (`GitHub Issue 26 <https://github.com/F5Networks/f5-telemetry-streaming/pull/26>`_) |br| * cipherText validation when protected by SecureVault |br| * Caching data about the host device to speed up declaration processing
+ - 03-10-20
+
* - 1.9.0
- - Updated the documentation for Telemetry Streaming v1.9.0. This release contains the following changes: |br| * Username and passphrase are now optional on the AWS CloudWatch consumer (see the important note in :ref:`awscloud-ref`) |br| * Added detailed information about character encoding and Telemetry Streaming (see :ref:`char-encoding`) |br| |br| Issues Resolved: |br| * Basic auth does not work with ElasticSearch consumer
+ - Updated the documentation for Telemetry Streaming v1.9.0. This release contains the following changes: |br| * Added support for gathering configuration information and statistics for GSLB Wide IP and Pools (see :ref:`System Information example output`) |br| * Username and passphrase are now optional on the AWS CloudWatch consumer (see the important note in :ref:`awscloud-ref`) |br| * Added detailed information about character encoding and Telemetry Streaming (see :ref:`char-encoding`) |br| * Added a FAQ entry to define the F5 Automation Toolchain API contract (see :ref:`What is the Automation Toolchain API Contract?`) |br| |br| Issues Resolved: |br| * Basic auth does not work with ElasticSearch consumer |br| * Some Splunk legacy tmstats datamodels have a period in property name instead of underscore
- 01-28-20
* - 1.8.0
diff --git a/examples/declarations/action_matching.json b/examples/declarations/action_matching.json
new file mode 100644
index 00000000..4faa5f1e
--- /dev/null
+++ b/examples/declarations/action_matching.json
@@ -0,0 +1,49 @@
+{
+ "class": "Telemetry",
+ "My_System": {
+ "class": "Telemetry_System",
+ "systemPoller": {
+ "interval": 60,
+ "actions": [
+ {
+ "includeData": {},
+ "ifAnyMatch": [
+ {
+ "virtualServers": {
+ "/test/*": {
+ "enabledState": "enabled",
+ "availabilityState": "available"
+ }
+ }
+ },
+ {
+ "virtualServers": {
+ "/test/*": {
+ "enabledState": "disabled",
+ "availabilityState": "available"
+ }
+ }
+ }
+ ],
+ "locations": {
+ "virtualServers": {
+ ".*": true
+ }
+ }
+ },
+ {
+ "excludeData": {},
+ "ifAllMatch": {
+ "system": {
+ "licenseReady": "no",
+ "provisionReady": "no"
+ }
+ },
+ "locations": {
+ ".*": true
+ }
+ }
+ ]
+ }
+ }
+}
diff --git a/examples/declarations/system_custom_endpoints.json b/examples/declarations/system_custom_endpoints.json
new file mode 100644
index 00000000..8dcc5c97
--- /dev/null
+++ b/examples/declarations/system_custom_endpoints.json
@@ -0,0 +1,66 @@
+{
+ "class": "Telemetry",
+ "Endpoints_Profiles": {
+ "class": "Telemetry_Endpoints",
+ "basePath": "/mgmt/tm/ltm/profile",
+ "items": {
+ "radiusProfiles": {
+ "name": "radiusProfiles",
+ "path": "radius/stats"
+ },
+ "ipOtherProfiles": {
+ "name": "ipOtherProfiles",
+ "path": "ipother/stats"
+ }
+ }
+ },
+ "Endpoints_Misc": {
+ "class": "Telemetry_Endpoints",
+ "items": {
+ "clientside.totConns": {
+ "name": "virtualTotConns",
+ "path": "/mgmt/ltm/virtual/stats?$select=clientside.totConns"
+ },
+ "virtualAddress": {
+ "path": "/mgmt/tm/ltm/virtual-address/stats"
+ }
+ }
+ },
+ "Custom_System": {
+ "class": "Telemetry_System",
+ "systemPoller": [
+ "Custom_System_Poller1",
+ "Custom_System_Poller2",
+ {
+ "interval": 60
+ }
+ ],
+ "enable": true,
+ "trace": true
+ },
+ "Custom_System_Poller1": {
+ "class": "Telemetry_System_Poller",
+ "interval": 60,
+ "enable": false,
+ "endpointList": "Endpoints_Profiles",
+ "trace": true
+ },
+ "Custom_System_Poller2": {
+ "class": "Telemetry_System_Poller",
+ "interval": 720,
+ "enable": true,
+ "endpointList": [
+ "Endpoints_Misc/clientside.totConns",
+ {
+ "path": "mgmt/tm/net/vlan/stats",
+ "name": "requiredWhenInline"
+ }
+ ]
+ },
+ "Default_System": {
+ "class": "Telemetry_System",
+ "systemPoller": {
+ "interval": 360
+ }
+ }
+}
diff --git a/examples/output/system_poller/output.json b/examples/output/system_poller/output.json
index 876172b2..8e5f8ae7 100644
--- a/examples/output/system_poller/output.json
+++ b/examples/output/system_poller/output.json
@@ -280,7 +280,19 @@
"ipProtocol": "tcp",
"tenant": "Common",
"pool": "/Common/foofoo.app/foofoo_pool",
- "application": "foofoo.app"
+ "application": "foofoo.app",
+ "profiles": {
+ "/Common/tcp": {
+ "name": "/Common/tcp",
+ "tenant": "Common"
+ },
+ "/Common/app/http": {
+ "name": "/Common/app/http",
+ "tenant": "Common",
+ "application": "app"
+ }
+ }
+
},
"/Example_Tenant/A1/serviceMain": {
"clientside.bitsIn": 0,
@@ -294,7 +306,8 @@
"ipProtocol": "tcp",
"tenant": "Example_Tenant",
"pool": "/Example_Tenant/A1/barbar_pool",
- "application": "A1"
+ "application": "A1",
+ "profiles": {}
},
"/Example_Tenant/A1/serviceMain-Redirect": {
"clientside.bitsIn": 0,
@@ -305,7 +318,18 @@
"enabledState": "enabled",
"name": "/Example_Tenant/A1/serviceMain-Redirect",
"tenant": "Example_Tenant",
- "application": "A1"
+ "application": "A1",
+ "profiles": {
+ "/Common/customTcp": {
+ "name": "/Common/customTcp",
+ "tenant": "Common"
+ },
+ "/Common/app/http": {
+ "name": "/Common/app/http",
+ "tenant": "Common",
+ "application": "app"
+ }
+ }
}
},
"pools": {
diff --git a/package-lock.json b/package-lock.json
index fba5c962..4715b99c 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,6 +1,6 @@
{
"name": "f5-telemetry",
- "version": "1.9.0",
+ "version": "1.10.0-2",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
@@ -164,11 +164,18 @@
}
},
"@f5devcentral/f5-teem": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/@f5devcentral/f5-teem/-/f5-teem-1.1.0.tgz",
- "integrity": "sha512-naNf4ZB5+H+qfpYvhfW+cxHZ37uXFc736UzQDhUNpymIO5NCZqSTunf7gg+STCZFNGYQmWf1czeHRBpMEOJn8w==",
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/@f5devcentral/f5-teem/-/f5-teem-1.2.0.tgz",
+ "integrity": "sha512-d9H3bzFcqREDdZNcqcbAOr+AFCOy3Alc8kudh3FkTqfWXqy/ooIuTb9Y+1Q8QayCWvp3fuXvQksprMuwQpnnfQ==",
"requires": {
- "uuid": "^3.3.2"
+ "uuid": "^3.4.0"
+ },
+ "dependencies": {
+ "uuid": {
+ "version": "3.4.0",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz",
+ "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A=="
+ }
}
},
"@sinonjs/commons": {
@@ -232,11 +239,11 @@
}
},
"ajv": {
- "version": "6.10.2",
- "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.2.tgz",
- "integrity": "sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw==",
+ "version": "6.12.0",
+ "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.0.tgz",
+ "integrity": "sha512-D6gFiFA0RRLyUbvijN74DWAjXSFxWKaWP7mldxkVhyhAV3+SWA9HEJPHQ2c9soIeTFJqcSdFDGFgdqs1iUU2Hw==",
"requires": {
- "fast-deep-equal": "^2.0.1",
+ "fast-deep-equal": "^3.1.1",
"fast-json-stable-stringify": "^2.0.0",
"json-schema-traverse": "^0.4.1",
"uri-js": "^4.2.2"
@@ -313,13 +320,182 @@
"dev": true
},
"array-includes": {
- "version": "3.0.3",
- "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.0.3.tgz",
- "integrity": "sha1-GEtI9i2S10UrsxsyMWXH+L0CJm0=",
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.1.tgz",
+ "integrity": "sha512-c2VXaCHl7zPsvpkFsw4nxvFie4fh1ur9bpcgsVkIjqn0H/Xwdg+7fv3n2r/isyS8EBj5b06M9kHyZuIr4El6WQ==",
"dev": true,
"requires": {
- "define-properties": "^1.1.2",
- "es-abstract": "^1.7.0"
+ "define-properties": "^1.1.3",
+ "es-abstract": "^1.17.0",
+ "is-string": "^1.0.5"
+ },
+ "dependencies": {
+ "es-abstract": {
+ "version": "1.17.4",
+ "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.4.tgz",
+ "integrity": "sha512-Ae3um/gb8F0mui/jPL+QiqmglkUsaQf7FwBEHYIFkztkneosu9imhqHpBzQ3h1vit8t5iQ74t6PEVvphBZiuiQ==",
+ "dev": true,
+ "requires": {
+ "es-to-primitive": "^1.2.1",
+ "function-bind": "^1.1.1",
+ "has": "^1.0.3",
+ "has-symbols": "^1.0.1",
+ "is-callable": "^1.1.5",
+ "is-regex": "^1.0.5",
+ "object-inspect": "^1.7.0",
+ "object-keys": "^1.1.1",
+ "object.assign": "^4.1.0",
+ "string.prototype.trimleft": "^2.1.1",
+ "string.prototype.trimright": "^2.1.1"
+ }
+ },
+ "es-to-primitive": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz",
+ "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==",
+ "dev": true,
+ "requires": {
+ "is-callable": "^1.1.4",
+ "is-date-object": "^1.0.1",
+ "is-symbol": "^1.0.2"
+ }
+ },
+ "has-symbols": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz",
+ "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==",
+ "dev": true
+ },
+ "is-callable": {
+ "version": "1.1.5",
+ "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz",
+ "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==",
+ "dev": true
+ },
+ "is-regex": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz",
+ "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==",
+ "dev": true,
+ "requires": {
+ "has": "^1.0.3"
+ }
+ },
+ "object-inspect": {
+ "version": "1.7.0",
+ "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz",
+ "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==",
+ "dev": true
+ },
+ "string.prototype.trimleft": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.1.tgz",
+ "integrity": "sha512-iu2AGd3PuP5Rp7x2kEZCrB2Nf41ehzh+goo8TV7z8/XDBbsvc6HQIlUl9RjkZ4oyrW1XM5UwlGl1oVEaDjg6Ag==",
+ "dev": true,
+ "requires": {
+ "define-properties": "^1.1.3",
+ "function-bind": "^1.1.1"
+ }
+ },
+ "string.prototype.trimright": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.1.tgz",
+ "integrity": "sha512-qFvWL3/+QIgZXVmJBfpHmxLB7xsUXz6HsUmP8+5dRaC3Q7oKUv9Vo6aMCRZC1smrtyECFsIT30PqBJ1gTjAs+g==",
+ "dev": true,
+ "requires": {
+ "define-properties": "^1.1.3",
+ "function-bind": "^1.1.1"
+ }
+ }
+ }
+ },
+ "array.prototype.flat": {
+ "version": "1.2.3",
+ "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.2.3.tgz",
+ "integrity": "sha512-gBlRZV0VSmfPIeWfuuy56XZMvbVfbEUnOXUvt3F/eUUUSyzlgLxhEX4YAEpxNAogRGehPSnfXyPtYyKAhkzQhQ==",
+ "dev": true,
+ "requires": {
+ "define-properties": "^1.1.3",
+ "es-abstract": "^1.17.0-next.1"
+ },
+ "dependencies": {
+ "es-abstract": {
+ "version": "1.17.4",
+ "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.4.tgz",
+ "integrity": "sha512-Ae3um/gb8F0mui/jPL+QiqmglkUsaQf7FwBEHYIFkztkneosu9imhqHpBzQ3h1vit8t5iQ74t6PEVvphBZiuiQ==",
+ "dev": true,
+ "requires": {
+ "es-to-primitive": "^1.2.1",
+ "function-bind": "^1.1.1",
+ "has": "^1.0.3",
+ "has-symbols": "^1.0.1",
+ "is-callable": "^1.1.5",
+ "is-regex": "^1.0.5",
+ "object-inspect": "^1.7.0",
+ "object-keys": "^1.1.1",
+ "object.assign": "^4.1.0",
+ "string.prototype.trimleft": "^2.1.1",
+ "string.prototype.trimright": "^2.1.1"
+ }
+ },
+ "es-to-primitive": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz",
+ "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==",
+ "dev": true,
+ "requires": {
+ "is-callable": "^1.1.4",
+ "is-date-object": "^1.0.1",
+ "is-symbol": "^1.0.2"
+ }
+ },
+ "has-symbols": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz",
+ "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==",
+ "dev": true
+ },
+ "is-callable": {
+ "version": "1.1.5",
+ "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz",
+ "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==",
+ "dev": true
+ },
+ "is-regex": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz",
+ "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==",
+ "dev": true,
+ "requires": {
+ "has": "^1.0.3"
+ }
+ },
+ "object-inspect": {
+ "version": "1.7.0",
+ "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz",
+ "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==",
+ "dev": true
+ },
+ "string.prototype.trimleft": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.1.tgz",
+ "integrity": "sha512-iu2AGd3PuP5Rp7x2kEZCrB2Nf41ehzh+goo8TV7z8/XDBbsvc6HQIlUl9RjkZ4oyrW1XM5UwlGl1oVEaDjg6Ag==",
+ "dev": true,
+ "requires": {
+ "define-properties": "^1.1.3",
+ "function-bind": "^1.1.1"
+ }
+ },
+ "string.prototype.trimright": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.1.tgz",
+ "integrity": "sha512-qFvWL3/+QIgZXVmJBfpHmxLB7xsUXz6HsUmP8+5dRaC3Q7oKUv9Vo6aMCRZC1smrtyECFsIT30PqBJ1gTjAs+g==",
+ "dev": true,
+ "requires": {
+ "define-properties": "^1.1.3",
+ "function-bind": "^1.1.1"
+ }
+ }
}
},
"asn1": {
@@ -361,19 +537,26 @@
"integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k="
},
"aws-sdk": {
- "version": "2.564.0",
- "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.564.0.tgz",
- "integrity": "sha512-X5MbcebjQ3iPNBvZ27WZyMEVCleBLqot2hqVz2M9XvMDR4B8qqPuteWrtbLu+DVjENvVD7Oj0BOIjrYEVWacFA==",
- "requires": {
- "buffer": "^4.9.1",
- "events": "^1.1.1",
- "ieee754": "^1.1.13",
- "jmespath": "^0.15.0",
- "querystring": "^0.2.0",
- "sax": "^1.2.1",
- "url": "^0.10.3",
- "uuid": "^3.3.2",
- "xml2js": "^0.4.19"
+ "version": "2.621.0",
+ "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.621.0.tgz",
+ "integrity": "sha512-wf87zTPXx2cILc9kAKTXcSrAc+vCc7BxE7G8vPEWAreCDucLHbynachYQvwO5ql+I3Eq651/X2XjnY01niSTNw==",
+ "requires": {
+ "buffer": "4.9.1",
+ "events": "1.1.1",
+ "ieee754": "1.1.13",
+ "jmespath": "0.15.0",
+ "querystring": "0.2.0",
+ "sax": "1.2.1",
+ "url": "0.10.3",
+ "uuid": "3.3.2",
+ "xml2js": "0.4.19"
+ },
+ "dependencies": {
+ "uuid": {
+ "version": "3.3.2",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz",
+ "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA=="
+ }
}
},
"aws-sign2": {
@@ -382,9 +565,9 @@
"integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg="
},
"aws4": {
- "version": "1.8.0",
- "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz",
- "integrity": "sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ=="
+ "version": "1.9.1",
+ "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.9.1.tgz",
+ "integrity": "sha512-wMHVg2EOHaMRxbzgFJ9gtjOOCrI80OHLG14rxi28XwOW8ux6IiEbRCGGGqCtdAIg4FQCbW20k9RsT4y3gJlFug=="
},
"balanced-match": {
"version": "1.0.0",
@@ -851,6 +1034,7 @@
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz",
"integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==",
+ "dev": true,
"requires": {
"object-keys": "^1.0.12"
}
@@ -942,6 +1126,7 @@
"version": "1.16.0",
"resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.16.0.tgz",
"integrity": "sha512-xdQnfykZ9JMEiasTAJZJdMWCQ1Vm00NBw79/AWi7ELfZuuPCSOMDZbT9mkOfSctVtfhb+sAAzrm+j//GjjLHLg==",
+ "dev": true,
"requires": {
"es-to-primitive": "^1.2.0",
"function-bind": "^1.1.1",
@@ -959,6 +1144,7 @@
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.0.tgz",
"integrity": "sha512-qZryBOJjV//LaxLTV6UC//WewneB3LcXOL9NP++ozKVXsIIIpm/2c13UDiD9Jp2eThsecw9m3jPqDwTyobcdbg==",
+ "dev": true,
"requires": {
"is-callable": "^1.1.4",
"is-date-object": "^1.0.1",
@@ -1087,42 +1273,54 @@
}
},
"eslint-import-resolver-node": {
- "version": "0.3.2",
- "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.2.tgz",
- "integrity": "sha512-sfmTqJfPSizWu4aymbPr4Iidp5yKm8yDkHp+Ir3YiTHiiDfxh69mOUsmiqW6RZ9zRXFaF64GtYmN7e+8GHBv6Q==",
+ "version": "0.3.3",
+ "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.3.tgz",
+ "integrity": "sha512-b8crLDo0M5RSe5YG8Pu2DYBj71tSB6OvXkfzwbJU2w7y8P4/yo0MyF8jU26IEuEuHF2K5/gcAJE3LhQGqBBbVg==",
"dev": true,
"requires": {
"debug": "^2.6.9",
- "resolve": "^1.5.0"
+ "resolve": "^1.13.1"
+ },
+ "dependencies": {
+ "resolve": {
+ "version": "1.15.1",
+ "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.1.tgz",
+ "integrity": "sha512-84oo6ZTtoTUpjgNEr5SJyzQhzL72gaRodsSfyxC/AXRvwu0Yse9H8eF9IpGo7b8YetZhlI6v7ZQ6bKBFV/6S7w==",
+ "dev": true,
+ "requires": {
+ "path-parse": "^1.0.6"
+ }
+ }
}
},
"eslint-module-utils": {
- "version": "2.4.1",
- "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.4.1.tgz",
- "integrity": "sha512-H6DOj+ejw7Tesdgbfs4jeS4YMFrT8uI8xwd1gtQqXssaR0EQ26L+2O/w6wkYFy2MymON0fTwHmXBvvfLNZVZEw==",
+ "version": "2.5.2",
+ "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.5.2.tgz",
+ "integrity": "sha512-LGScZ/JSlqGKiT8OC+cYRxseMjyqt6QO54nl281CK93unD89ijSeRV6An8Ci/2nvWVKe8K/Tqdm75RQoIOCr+Q==",
"dev": true,
"requires": {
- "debug": "^2.6.8",
+ "debug": "^2.6.9",
"pkg-dir": "^2.0.0"
}
},
"eslint-plugin-import": {
- "version": "2.18.2",
- "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.18.2.tgz",
- "integrity": "sha512-5ohpsHAiUBRNaBWAF08izwUGlbrJoJJ+W9/TBwsGoR1MnlgfwMIKrFeSjWbt6moabiXW9xNvtFz+97KHRfI4HQ==",
+ "version": "2.20.1",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.20.1.tgz",
+ "integrity": "sha512-qQHgFOTjguR+LnYRoToeZWT62XM55MBVXObHM6SKFd1VzDcX/vqT1kAz8ssqigh5eMj8qXcRoXXGZpPP6RfdCw==",
"dev": true,
"requires": {
"array-includes": "^3.0.3",
+ "array.prototype.flat": "^1.2.1",
"contains-path": "^0.1.0",
"debug": "^2.6.9",
"doctrine": "1.5.0",
"eslint-import-resolver-node": "^0.3.2",
- "eslint-module-utils": "^2.4.0",
+ "eslint-module-utils": "^2.4.1",
"has": "^1.0.3",
"minimatch": "^3.0.4",
"object.values": "^1.1.0",
"read-pkg-up": "^2.0.0",
- "resolve": "^1.11.0"
+ "resolve": "^1.12.0"
},
"dependencies": {
"doctrine": {
@@ -1256,14 +1454,14 @@
"dev": true
},
"fast-deep-equal": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz",
- "integrity": "sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk="
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.1.tgz",
+ "integrity": "sha512-8UEa58QDLauDNfpbrX55Q9jrGHThw2ZMdOky5Gl1CDtVeJDPVrG4Jxx1N8jw2gkWaff5UUuX1KJd+9zGe2B+ZA=="
},
"fast-json-stable-stringify": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz",
- "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I="
+ "version": "2.1.0",
+ "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
+ "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="
},
"fast-levenshtein": {
"version": "2.0.6",
@@ -1448,7 +1646,8 @@
"function-bind": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz",
- "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A=="
+ "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==",
+ "dev": true
},
"functional-red-black-tree": {
"version": "1.0.1",
@@ -1568,6 +1767,7 @@
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz",
"integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==",
+ "dev": true,
"requires": {
"function-bind": "^1.1.1"
}
@@ -1589,7 +1789,8 @@
"has-symbols": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.0.tgz",
- "integrity": "sha1-uhqPGvKg/DllD1yFA2dwQSIGO0Q="
+ "integrity": "sha1-uhqPGvKg/DllD1yFA2dwQSIGO0Q=",
+ "dev": true
},
"has-unicode": {
"version": "2.0.1",
@@ -1815,12 +2016,14 @@
"is-callable": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.4.tgz",
- "integrity": "sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA=="
+ "integrity": "sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA==",
+ "dev": true
},
"is-date-object": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.1.tgz",
- "integrity": "sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY="
+ "integrity": "sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY=",
+ "dev": true
},
"is-fullwidth-code-point": {
"version": "1.0.0",
@@ -1847,6 +2050,7 @@
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.4.tgz",
"integrity": "sha1-VRdIm1RwkbCTDglWVM7SXul+lJE=",
+ "dev": true,
"requires": {
"has": "^1.0.1"
}
@@ -1857,10 +2061,17 @@
"integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=",
"dev": true
},
+ "is-string": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz",
+ "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==",
+ "dev": true
+ },
"is-symbol": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.2.tgz",
"integrity": "sha512-HS8bZ9ox60yCJLH9snBpIwv9pYUAkcuLhSA1oero1UB5y9aiQpRA8y2ex945AOtCZL1lJDeIk3G5LthswI46Lw==",
+ "dev": true,
"requires": {
"has-symbols": "^1.0.0"
}
@@ -2259,16 +2470,16 @@
}
},
"mime-db": {
- "version": "1.40.0",
- "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.40.0.tgz",
- "integrity": "sha512-jYdeOMPy9vnxEqFRRo6ZvTZ8d9oPb+k18PKoYNYUe2stVEBPPwsln/qWzdbmaIvnhZ9v2P+CuecK+fpUfsV2mA=="
+ "version": "1.43.0",
+ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.43.0.tgz",
+ "integrity": "sha512-+5dsGEEovYbT8UY9yD7eE4XTc4UwJ1jBYlgaQQF38ENsKR3wj/8q8RFZrF9WIZpB2V1ArTVFUva8sAul1NzRzQ=="
},
"mime-types": {
- "version": "2.1.24",
- "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.24.tgz",
- "integrity": "sha512-WaFHS3MCl5fapm3oLxU4eYDw77IQM2ACcxQ9RIxfaC3ooc6PFuBMGZZsYpvoXS5D5QTWPieo1jjLdAm3TBP3cQ==",
+ "version": "2.1.26",
+ "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.26.tgz",
+ "integrity": "sha512-01paPWYgLrkqAyrlDorC1uDwl2p3qZT7yl806vW7DvDoxwXi46jsjFbg+WdwotBIk6/MbEhO/dh5aZ5sNj/dWQ==",
"requires": {
- "mime-db": "1.40.0"
+ "mime-db": "1.43.0"
}
},
"mimic-fn": {
@@ -2389,9 +2600,9 @@
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
},
"mustache": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/mustache/-/mustache-3.1.0.tgz",
- "integrity": "sha512-3Bxq1R5LBZp7fbFPZzFe5WN4s0q3+gxZaZuZVY+QctYJiCiVgXHOTIC0/HgZuOPFt/6BQcx5u0H2CUOxT/RoGQ=="
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/mustache/-/mustache-4.0.0.tgz",
+ "integrity": "sha512-FJgjyX/IVkbXBXYUwH+OYwQKqWpFPLaLVESd70yHjSDunwzV2hZOoTBvPf4KLoxesUzzyfTH6F784Uqd7Wm5yA=="
},
"mute-stream": {
"version": "0.0.7",
@@ -2664,7 +2875,8 @@
"object-inspect": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.6.0.tgz",
- "integrity": "sha512-GJzfBZ6DgDAmnuaM3104jR4s1Myxr3Y3zfIyN4z3UdqN69oSRacNK8UhnobDdC+7J2AHCjGwxQubNJfE70SXXQ=="
+ "integrity": "sha512-GJzfBZ6DgDAmnuaM3104jR4s1Myxr3Y3zfIyN4z3UdqN69oSRacNK8UhnobDdC+7J2AHCjGwxQubNJfE70SXXQ==",
+ "dev": true
},
"object-is": {
"version": "1.0.1",
@@ -2675,7 +2887,8 @@
"object-keys": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz",
- "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA=="
+ "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==",
+ "dev": true
},
"object.assign": {
"version": "4.1.0",
@@ -2701,25 +2914,95 @@
"has": "^1.0.3"
}
},
- "object.getownpropertydescriptors": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz",
- "integrity": "sha1-h1jIRvW0B62rDyNuCYbxSwUcqhY=",
- "requires": {
- "define-properties": "^1.1.2",
- "es-abstract": "^1.5.1"
- }
- },
"object.values": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.0.tgz",
- "integrity": "sha512-8mf0nKLAoFX6VlNVdhGj31SVYpaNFtUnuoOXWyFEstsWRgU837AK+JYM0iAxwkSzGRbwn8cbFmgbyxj1j4VbXg==",
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.1.tgz",
+ "integrity": "sha512-WTa54g2K8iu0kmS/us18jEmdv1a4Wi//BZ/DTVYEcH0XhLM5NYdpDHja3gt57VrZLcNAO2WGA+KpWsDBaHt6eA==",
"dev": true,
"requires": {
"define-properties": "^1.1.3",
- "es-abstract": "^1.12.0",
+ "es-abstract": "^1.17.0-next.1",
"function-bind": "^1.1.1",
"has": "^1.0.3"
+ },
+ "dependencies": {
+ "es-abstract": {
+ "version": "1.17.4",
+ "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.4.tgz",
+ "integrity": "sha512-Ae3um/gb8F0mui/jPL+QiqmglkUsaQf7FwBEHYIFkztkneosu9imhqHpBzQ3h1vit8t5iQ74t6PEVvphBZiuiQ==",
+ "dev": true,
+ "requires": {
+ "es-to-primitive": "^1.2.1",
+ "function-bind": "^1.1.1",
+ "has": "^1.0.3",
+ "has-symbols": "^1.0.1",
+ "is-callable": "^1.1.5",
+ "is-regex": "^1.0.5",
+ "object-inspect": "^1.7.0",
+ "object-keys": "^1.1.1",
+ "object.assign": "^4.1.0",
+ "string.prototype.trimleft": "^2.1.1",
+ "string.prototype.trimright": "^2.1.1"
+ }
+ },
+ "es-to-primitive": {
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz",
+ "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==",
+ "dev": true,
+ "requires": {
+ "is-callable": "^1.1.4",
+ "is-date-object": "^1.0.1",
+ "is-symbol": "^1.0.2"
+ }
+ },
+ "has-symbols": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz",
+ "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==",
+ "dev": true
+ },
+ "is-callable": {
+ "version": "1.1.5",
+ "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz",
+ "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==",
+ "dev": true
+ },
+ "is-regex": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz",
+ "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==",
+ "dev": true,
+ "requires": {
+ "has": "^1.0.3"
+ }
+ },
+ "object-inspect": {
+ "version": "1.7.0",
+ "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz",
+ "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==",
+ "dev": true
+ },
+ "string.prototype.trimleft": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.1.tgz",
+ "integrity": "sha512-iu2AGd3PuP5Rp7x2kEZCrB2Nf41ehzh+goo8TV7z8/XDBbsvc6HQIlUl9RjkZ4oyrW1XM5UwlGl1oVEaDjg6Ag==",
+ "dev": true,
+ "requires": {
+ "define-properties": "^1.1.3",
+ "function-bind": "^1.1.1"
+ }
+ },
+ "string.prototype.trimright": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.1.tgz",
+ "integrity": "sha512-qFvWL3/+QIgZXVmJBfpHmxLB7xsUXz6HsUmP8+5dRaC3Q7oKUv9Vo6aMCRZC1smrtyECFsIT30PqBJ1gTjAs+g==",
+ "dev": true,
+ "requires": {
+ "define-properties": "^1.1.3",
+ "function-bind": "^1.1.1"
+ }
+ }
}
},
"once": {
@@ -2993,9 +3276,9 @@
"dev": true
},
"psl": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/psl/-/psl-1.4.0.tgz",
- "integrity": "sha512-HZzqCGPecFLyoRj5HLfuDSKYTJkAfB5thKBIkRHtGjWwY7p1dAyveIbXIq4tO0KYfDF2tHqPUgY9SDnGm00uFw=="
+ "version": "1.7.0",
+ "resolved": "https://registry.npmjs.org/psl/-/psl-1.7.0.tgz",
+ "integrity": "sha512-5NsSEDv8zY70ScRnOTn7bK7eanl2MvFrOrS/R6x+dBt5g1ghnj9Zv90kO8GwT8gxcu2ANyFprnFYB85IogIJOQ=="
},
"pump": {
"version": "3.0.0",
@@ -3101,9 +3384,9 @@
}
},
"request": {
- "version": "2.88.0",
- "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz",
- "integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==",
+ "version": "2.88.2",
+ "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz",
+ "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==",
"requires": {
"aws-sign2": "~0.7.0",
"aws4": "^1.8.0",
@@ -3112,7 +3395,7 @@
"extend": "~3.0.2",
"forever-agent": "~0.6.1",
"form-data": "~2.3.2",
- "har-validator": "~5.1.0",
+ "har-validator": "~5.1.3",
"http-signature": "~1.2.0",
"is-typedarray": "~1.0.0",
"isstream": "~0.1.2",
@@ -3122,7 +3405,7 @@
"performance-now": "^2.1.0",
"qs": "~6.5.2",
"safe-buffer": "^5.1.2",
- "tough-cookie": "~2.4.3",
+ "tough-cookie": "~2.5.0",
"tunnel-agent": "^0.6.0",
"uuid": "^3.3.2"
}
@@ -3206,9 +3489,9 @@
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"sax": {
- "version": "1.2.4",
- "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
- "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw=="
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz",
+ "integrity": "sha1-e45lYZCyKOgaZq6nSEgNgozS03o="
},
"semver": {
"version": "5.7.1",
@@ -3380,18 +3663,18 @@
"dev": true
},
"ssh2": {
- "version": "0.8.5",
- "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-0.8.5.tgz",
- "integrity": "sha512-TkvzxSYYUSQ8jb//HbHnJVui4fVEW7yu/zwBxwro/QaK2EGYtwB+8gdEChwHHuj142c5+250poMC74aJiwApPw==",
+ "version": "0.8.7",
+ "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-0.8.7.tgz",
+ "integrity": "sha512-/u1BO12kb0lDVxJXejWB9pxyF3/ncgRqI9vPCZuPzo05pdNDzqUeQRavScwSPsfMGK+5H/VRqp1IierIx0Bcxw==",
"dev": true,
"requires": {
- "ssh2-streams": "~0.4.4"
+ "ssh2-streams": "~0.4.8"
}
},
"ssh2-streams": {
- "version": "0.4.6",
- "resolved": "https://registry.npmjs.org/ssh2-streams/-/ssh2-streams-0.4.6.tgz",
- "integrity": "sha512-jXq/nk2K82HuueO9CTCdas/a0ncX3fvYzEPKt1+ftKwE5RXTX25GyjcpjBh2lwVUYbk0c9yq6cBczZssWmU3Tw==",
+ "version": "0.4.8",
+ "resolved": "https://registry.npmjs.org/ssh2-streams/-/ssh2-streams-0.4.8.tgz",
+ "integrity": "sha512-auxXfgYySz2vYw7TMU7PK7vFI7EPvhvTH8/tZPgGaWocK4p/vwCMiV3icz9AEkb0R40kOKZtFtqYIxDJyJiytw==",
"dev": true,
"requires": {
"asn1": "~0.2.0",
@@ -3442,6 +3725,7 @@
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.0.tgz",
"integrity": "sha512-FJ6b7EgdKxxbDxc79cOlok6Afd++TTs5szo+zJTUyow3ycrRfJVE2pq3vcN53XexvKZu/DJMDfeI/qMiZTrjTw==",
+ "dev": true,
"requires": {
"define-properties": "^1.1.3",
"function-bind": "^1.1.1"
@@ -3451,6 +3735,7 @@
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.0.tgz",
"integrity": "sha512-fXZTSV55dNBwv16uw+hh5jkghxSnc5oHq+5K/gXgizHwAvMetdAJlHqqoFC1FSDVPYWLkAKl2cxpUT41sV7nSg==",
+ "dev": true,
"requires": {
"define-properties": "^1.1.3",
"function-bind": "^1.1.1"
@@ -3731,19 +4016,12 @@
"dev": true
},
"tough-cookie": {
- "version": "2.4.3",
- "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz",
- "integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==",
+ "version": "2.5.0",
+ "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz",
+ "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==",
"requires": {
- "psl": "^1.1.24",
- "punycode": "^1.4.1"
- },
- "dependencies": {
- "punycode": {
- "version": "1.4.1",
- "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz",
- "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4="
- }
+ "psl": "^1.1.28",
+ "punycode": "^2.1.1"
}
},
"traverse": {
@@ -3839,15 +4117,6 @@
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
"integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8="
},
- "util.promisify": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/util.promisify/-/util.promisify-1.0.0.tgz",
- "integrity": "sha512-i+6qA2MPhvoKLuxnJNpXAGhg7HphQOSUq2LKMZD0m15EiskXUkMvKdF4Uui0WYeCUGea+o2cw/ZuwehtfsrNkA==",
- "requires": {
- "define-properties": "^1.1.2",
- "object.getownpropertydescriptors": "^2.0.3"
- }
- },
"uuid": {
"version": "3.3.3",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.3.tgz",
@@ -4011,19 +4280,18 @@
}
},
"xml2js": {
- "version": "0.4.22",
- "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.22.tgz",
- "integrity": "sha512-MWTbxAQqclRSTnehWWe5nMKzI3VmJ8ltiJEco8akcC6j3miOhjjfzKum5sId+CWhfxdOs/1xauYr8/ZDBtQiRw==",
+ "version": "0.4.19",
+ "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz",
+ "integrity": "sha512-esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q==",
"requires": {
"sax": ">=0.6.0",
- "util.promisify": "~1.0.0",
- "xmlbuilder": "~11.0.0"
+ "xmlbuilder": "~9.0.1"
}
},
"xmlbuilder": {
- "version": "11.0.1",
- "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz",
- "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA=="
+ "version": "9.0.7",
+ "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz",
+ "integrity": "sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0="
},
"y18n": {
"version": "4.0.0",
diff --git a/package.json b/package.json
index 5713cd37..ce32f3dc 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "f5-telemetry",
- "version": "1.9.0",
+ "version": "1.10.0-2",
"author": "F5 Networks",
"license": "Apache-2.0",
"repository": {
@@ -12,7 +12,7 @@
"install-test": "npm install --no-optional",
"lint": "eslint src test",
"test-functional": "mocha \"./test/functional/testRunner.js\" --opts ./test/functional/.mocha.opts",
- "test-only": "mocha --recursive \"./test/unit/**/*.js\" --opts ./test/unit/.mocha.opts",
+ "test-only": "mocha --opts ./test/unit/.mocha.opts",
"test": "nyc --all npm run test-only",
"build": "./scripts/build/buildRpm.sh"
},
@@ -32,33 +32,30 @@
]
},
"dependencies": {
- "@f5devcentral/f5-teem": "^1.1.0",
- "ajv": "^6.5.4",
+ "@f5devcentral/f5-teem": "^1.2.0",
+ "ajv": "^6.12.0",
"ajv-async": "^1.0.1",
- "aws-sdk": "^2.369.0",
+ "aws-sdk": "^2.621.0",
"commander": "^2.19.0",
"deep-diff": "^1.0.2",
"elasticsearch": "^15.3.0",
"jsonwebtoken": "^8.5.1",
"kafka-node": "^2.6.1",
- "mustache": "^3.0.0",
+ "mustache": "^4.0.0",
"node-statsd": "0.1.1",
- "request": "^2.88.0"
+ "request": "^2.88.2"
},
"devDependencies": {
"@f5devcentral/eslint-config-f5-atg": "latest",
"chai": "^4.2.0",
"chai-as-promised": "^7.1.1",
- "eslint": "^5.16.0",
- "eslint-config-airbnb-base": "^13.1.0",
- "eslint-plugin-import": "^2.17.3",
- "icrdk": "git://github.com/f5devcentral/f5-icontrollx-dev-kit#master",
+ "icrdk": "git://github.com/f5devcentral/f5-icontrollx-dev-kit.git#master",
"mocha": "^5.2.0",
"nock": "10.0.0",
"nyc": "^14.1.1",
"proxyquire": "^2.1.3",
"sinon": "^7.4.1",
- "ssh2": "^0.8.2",
+ "ssh2": "^0.8.7",
"winston": "^2.4.4"
},
"eslintConfig": {
diff --git a/scripts/functional-testing/setup.sh b/scripts/functional-testing/setup.sh
new file mode 100644
index 00000000..77818f2b
--- /dev/null
+++ b/scripts/functional-testing/setup.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+
+set -e
+
+# BIG-IP deployment tool variables stored in GitLab:
+# CICD_AUTH_OS_USERNAME - VIO user
+# CICD_AUTH_OS_PASSWORD - VIO password
+# CICD_AUTH_OS_PROJECT - VIO project
+# or
+# CICD_AUTH_OS_TOKEN - VIO auth token
+# CICD_AUTH_OS_PROJECT - VIO project
+
+# BIG-IP deployment tool variables:
+export CUSTOM_DECLARATION="yes"
+export PROJECT_DECLARATION="${CI_PROJECT_DIR}/test/functional/deployment/declaration.yml"
+export PROJECT_NAME=$([ "${CICD_PROJECT_NAME}" == "" ] && echo "test_functional_harness" || echo "${CICD_PROJECT_NAME}")
+export PROJECT_DIR="/root/deploy-projects/${PROJECT_NAME}"
+
+echo "CUSTOM_DECLARATION = ${CUSTOM_DECLARATION}"
+echo "PROJECT_NAME = ${PROJECT_NAME}"
+echo "PROJECT_DIR = ${PROJECT_DIR}"
+echo "PROJECT_DECLARATION = ${PROJECT_DECLARATION}"
+
+declaration=$(sed "s/_DEPLOYMENT_NAME_/${PROJECT_NAME}/g" "${PROJECT_DECLARATION}")
+echo "$declaration" > "${PROJECT_DECLARATION}"
+cat "${PROJECT_DECLARATION}"
+
+# ready to start deployment
+cd /root/cicd-bigip-deploy
+make configure
+make printvars
+make setup
+
+# for debugging purpose only
+ls -als ${PROJECT_DIR}
+
+# copy info about deployed harness to project's folder to create artifact
+cp ${PROJECT_DIR}/harness_facts_flat.json ${CI_PROJECT_DIR}/harness_facts_flat.json
diff --git a/scripts/functional-testing/teardown.sh b/scripts/functional-testing/teardown.sh
new file mode 100644
index 00000000..2cc98785
--- /dev/null
+++ b/scripts/functional-testing/teardown.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+set -e
+
+# BIG-IP deployment tool variables stored in GitLab:
+# CICD_AUTH_OS_USERNAME - VIO user
+# CICD_AUTH_OS_PASSWORD - VIO password
+# CICD_AUTH_OS_PROJECT - VIO project
+# or
+# CICD_AUTH_OS_TOKEN - VIO auth token
+# CICD_AUTH_OS_PROJECT - VIO project
+
+# BIG-IP deployment tool variables:
+export CUSTOM_DECLARATION="yes"
+export PROJECT_DECLARATION="${CI_PROJECT_DIR}/test/functional/deployment/declaration.yml"
+export PROJECT_NAME=$([ "${CICD_PROJECT_NAME}" == "" ] && echo "test_functional_harness" || echo "${CICD_PROJECT_NAME}")
+export PROJECT_DIR="/root/deploy-projects/${PROJECT_NAME}"
+
+echo "CUSTOM_DECLARATION = ${CUSTOM_DECLARATION}"
+echo "PROJECT_NAME = ${PROJECT_NAME}"
+echo "PROJECT_DIR = ${PROJECT_DIR}"
+echo "PROJECT_DECLARATION = ${PROJECT_DECLARATION}"
+
+declaration=$(sed "s/_DEPLOYMENT_NAME_/${PROJECT_NAME}/g" "${PROJECT_DECLARATION}")
+echo "$declaration" > "${PROJECT_DECLARATION}"
+cat "${PROJECT_DECLARATION}"
+
+# ready to start deployment
+cd /root/cicd-bigip-deploy
+make configure
+make printvars
+make teardown
diff --git a/scripts/schema-build.js b/scripts/schema-build.js
index 21979d83..7625df15 100644
--- a/scripts/schema-build.js
+++ b/scripts/schema-build.js
@@ -8,6 +8,7 @@
'use strict';
+const assert = require('assert');
const fs = require('fs');
// const base = require('../src/schema/latest/base_schema.json');
@@ -15,7 +16,26 @@ const fs = require('fs');
const SCHEMA_DIR = `${__dirname}/../src/schema/latest`;
const outputFile = `${__dirname}/../dist/ts.schema.json`;
-const safeTraverse = (p, o) => p.reduce((xs, x) => (xs && xs[x] ? xs[x] : null), o);
+const safeTraverse = (pathArray, parentObject) => pathArray.reduce(
+ (curObj, curPath) => (typeof curObj !== 'undefined' && typeof curObj[curPath] !== 'undefined' ? curObj[curPath] : undefined),
+ parentObject
+);
+
+const normalizeReference = (ref, schemaId) => {
+ ref = ref.startsWith('#') ? `${schemaId}${ref}` : ref;
+ return ref.split('#').join('');
+};
+
+const getReferenceValue = (ref, schemaId, schemaMap) => {
+ const normalizedRef = normalizeReference(ref, schemaId);
+ const refParts = normalizedRef.split('/');
+ const definition = safeTraverse(refParts, schemaMap);
+ assert.notStrictEqual(definition, undefined, `Unable to dereference '${ref}' from schema with id '${schemaId}'`);
+ return {
+ definition,
+ schemaId: refParts[0]
+ };
+};
function writeSchema(name, data) {
return new Promise((resolve, reject) => {
@@ -27,58 +47,65 @@ function writeSchema(name, data) {
}
function combineSchemas() {
- const paths = fs.readdirSync(`${SCHEMA_DIR}/`)
+ const base = { definitions: {} };
+ const schemaMap = {};
+
+ fs.readdirSync(`${SCHEMA_DIR}/`)
.filter(name => !(name.includes('draft')) && name.endsWith('schema.json'))
- .map(fileName => `${SCHEMA_DIR}/${fileName}`);
+ .map(fileName => `${SCHEMA_DIR}/${fileName}`)
+ .forEach((path) => {
+ const schema = JSON.parse(fs.readFileSync(path, 'utf8'));
+ assert.notStrictEqual(schema.$id, undefined, `Schema at path '${path}' should have $id property`);
+ schemaMap[schema.$id] = schema;
+ });
- const base = { definitions: {} };
- const contents = [];
- const defs = {};
-
- paths.forEach((path) => {
- const content = JSON.parse(fs.readFileSync(path, 'utf8'));
- contents.push(content);
- if (content.definitions) {
- Object.assign(defs, content.definitions);
+ Object.keys(schemaMap).forEach((schemaId) => {
+ const schema = schemaMap[schemaId];
+ if (!schema.allOf) {
+ return;
}
- });
- contents.forEach((content) => {
- if (content.allOf) {
- content.allOf.forEach((tsClass) => {
- const classType = safeTraverse(['if', 'properties', 'class', 'const'], tsClass);
-
- if (classType) {
- const tmp = {};
- const propKeys = Object.keys(tsClass.then.properties);
- propKeys.forEach((propKey) => {
- const prop = tsClass.then.properties[propKey];
-
- // dereference all values
- const ref = prop.$ref;
- if (ref) {
- const def = ref.split('/').pop();
- tsClass.then.properties[propKey] = defs[def];
- } else if (prop.allOf) {
- const def = prop.allOf[0].$ref.split('/').pop();
- tsClass.then.properties[propKey] = defs[def];
- } else if (prop.oneOf) {
- const def = prop.oneOf[1].allOf[1].$ref.split('/').pop();
- tsClass.then.properties[propKey] = defs[def];
- }
-
- // inherit default value on top of the definition
- if (prop.$ref || prop.allOf || prop.oneOf) {
- tsClass.then.properties[propKey].default = prop.default;
- }
- });
-
- tmp[classType] = tsClass.then;
- tmp[classType].description = tsClass.description;
- base.definitions = Object.assign(base.definitions, tmp);
+ schema.allOf.forEach((tsClass) => {
+ const classType = safeTraverse(['if', 'properties', 'class', 'const'], tsClass);
+ if (!classType) {
+ return;
+ }
+
+ const tmp = {};
+ const properties = tsClass.then.properties;
+
+ Object.keys(properties).forEach((propKey) => {
+ const prop = properties[propKey];
+ // dereference all values
+ if (prop.$ref) {
+ properties[propKey] = getReferenceValue(prop.$ref, schemaId, schemaMap).definition;
+ } else if (prop.allOf) {
+ properties[propKey] = getReferenceValue(prop.allOf[0].$ref, schemaId, schemaMap).definition;
+ } else if (prop.oneOf) {
+ let value;
+ if (propKey === 'systemPoller') {
+ // Telemetry_System -> systemPoller property -> ref to systemPollerObjectRef -> systemPoller
+ value = getReferenceValue(prop.oneOf[1].$ref, schemaId, schemaMap);
+ value = getReferenceValue(value.definition.allOf[1].$ref, value.schemaId, schemaMap).definition;
+ } else if (propKey === 'iHealthPoller') {
+ // Telemetry_System -> iHealthPoller property -> ref to iHealthPollerRef -> iHealthPoller
+ value = getReferenceValue(prop.oneOf[1].$ref, schemaId, schemaMap);
+ value = getReferenceValue(value.definition.allOf[1].$ref, value.schemaId, schemaMap).definition;
+ } else {
+ value = getReferenceValue(prop.oneOf[1].allOf[1].$ref, schemaId, schemaMap).definition;
+ }
+ properties[propKey] = value;
+ }
+ // inherit default value on top of the definition
+ if (prop.$ref || prop.allOf || prop.oneOf) {
+ properties[propKey].default = prop.default;
}
});
- }
+
+ tmp[classType] = tsClass.then;
+ tmp[classType].description = tsClass.description;
+ base.definitions = Object.assign(base.definitions, tmp);
+ });
});
return writeSchema(outputFile, base);
}
diff --git a/scripts/schema-to-rst.js b/scripts/schema-to-rst.js
index 8a70f80c..da85bb53 100644
--- a/scripts/schema-to-rst.js
+++ b/scripts/schema-to-rst.js
@@ -242,7 +242,7 @@ function getProperties(definition, props, defName) {
function conditionalDescription(definition, props, defName) {
if ((definition.if && definition.if.properties)
- || (definition.else && definition.else.properties)) {
+ || (definition.else && definition.else.properties && !definition.if.required)) {
const conditionalProps = definition.then;
if (conditionalProps.properties) {
const conditionalKey = Object.keys(definition.if.properties)[0];
diff --git a/src/lib/config.js b/src/lib/config.js
index df19c321..f57a8cdd 100644
--- a/src/lib/config.js
+++ b/src/lib/config.js
@@ -14,10 +14,10 @@ const setupAsync = require('ajv-async');
const EventEmitter = require('events');
const TeemDevice = require('@f5devcentral/f5-teem').Device;
-const logger = require('./logger.js');
-const util = require('./util.js');
-const deviceUtil = require('./deviceUtil.js');
-const persistentStorage = require('./persistentStorage.js').persistentStorage;
+const logger = require('./logger');
+const util = require('./util');
+const deviceUtil = require('./deviceUtil');
+const persistentStorage = require('./persistentStorage').persistentStorage;
const baseSchema = require('../schema/latest/base_schema.json');
const controlsSchema = require('../schema/latest/controls_schema.json');
@@ -27,11 +27,12 @@ const systemPollerSchema = require('../schema/latest/system_poller_schema.json')
const listenerSchema = require('../schema/latest/listener_schema.json');
const consumerSchema = require('../schema/latest/consumer_schema.json');
const iHealthPollerSchema = require('../schema/latest/ihealth_poller_schema.json');
+const endpointsSchema = require('../schema/latest/endpoints_schema.json');
-const customKeywords = require('./customKeywords.js');
-const CONTROLS_CLASS_NAME = require('./constants.js').CONTROLS_CLASS_NAME;
-const CONTROLS_PROPERTY_NAME = require('./constants.js').CONTROLS_PROPERTY_NAME;
-const VERSION = require('./constants.js').VERSION;
+const customKeywords = require('./customKeywords');
+const CONTROLS_CLASS_NAME = require('./constants').CONFIG_CLASSES.CONTROLS_CLASS_NAME;
+const CONTROLS_PROPERTY_NAME = require('./constants').CONTROLS_PROPERTY_NAME;
+const VERSION = require('./constants').VERSION;
const PERSISTENT_STORAGE_KEY = 'config';
const BASE_CONFIG = {
@@ -52,7 +53,7 @@ function ConfigWorker() {
name: 'Telemetry Streaming',
version: VERSION
};
- this.teemDevice = new TeemDevice(assetInfo, 'staging');
+ this.teemDevice = new TeemDevice(assetInfo);
}
nodeUtil.inherits(ConfigWorker, EventEmitter);
@@ -118,6 +119,7 @@ ConfigWorker.prototype.loadConfig = function () {
* @returns {Object} Promise which is resolved once state is saved
*/
ConfigWorker.prototype.saveConfig = function (config) {
+ // persistentStorage.set will make copy of data
return persistentStorage.set(PERSISTENT_STORAGE_KEY, config)
.then(() => logger.debug('Application config saved'))
.catch((err) => {
@@ -133,13 +135,14 @@ ConfigWorker.prototype.saveConfig = function (config) {
* @returns {Promise} Promise resolved with config
*/
ConfigWorker.prototype.getConfig = function () {
+ // persistentStorage.get returns data copy
return persistentStorage.get(PERSISTENT_STORAGE_KEY)
.then((data) => {
if (typeof data === 'undefined') {
logger.debug(`persistentStorage did not have a value for ${PERSISTENT_STORAGE_KEY}`);
}
return (typeof data === 'undefined'
- || typeof data.parsed === 'undefined') ? BASE_CONFIG : data;
+ || typeof data.parsed === 'undefined') ? util.deepCopy(BASE_CONFIG) : data;
});
};
@@ -158,7 +161,8 @@ ConfigWorker.prototype.compileSchema = function () {
systemPoller: systemPollerSchema,
listener: listenerSchema,
consumer: consumerSchema,
- iHealthPoller: iHealthPollerSchema
+ iHealthPoller: iHealthPollerSchema,
+ endpoints: endpointsSchema
};
const keywords = customKeywords;
diff --git a/src/lib/constants.js b/src/lib/constants.js
index cb73a566..6d841c83 100644
--- a/src/lib/constants.js
+++ b/src/lib/constants.js
@@ -8,7 +8,27 @@
'use strict';
-const VERSION = '1.9.0';
+const packageVersionInfo = (function () {
+ let packageVersion = '0.0.0-0';
+ ['../package.json', '../../package.json'].some((fname) => {
+ try {
+ packageVersion = require(fname).version; // eslint-disable-line global-require,import/no-dynamic-require
+ delete require.cache[require.resolve(fname)];
+ } catch (err) {
+ return false;
+ }
+ return true;
+ });
+ packageVersion = packageVersion.split('-');
+ if (packageVersion.length === 1) {
+ // push RELEASE number
+ packageVersion.push('1');
+ }
+ return packageVersion;
+}());
+
+const VERSION = packageVersionInfo[0];
+const RELEASE = packageVersionInfo[1];
/**
@@ -47,13 +67,19 @@ WEEKDAY_TO_DAY_NAME[7] = 'sunday';
module.exports = {
+ RELEASE,
VERSION,
- BIG_IP_DEVICE_TYPE: 'BIG-IP',
- CONSUMERS_CLASS_NAME: 'Telemetry_Consumer',
+ CONFIG_CLASSES: {
+ CONSUMER_CLASS_NAME: 'Telemetry_Consumer',
+ CONTROLS_CLASS_NAME: 'Controls',
+ ENDPOINTS_CLASS_NAME: 'Telemetry_Endpoints',
+ EVENT_LISTENER_CLASS_NAME: 'Telemetry_Listener',
+ IHEALTH_POLLER_CLASS_NAME: 'Telemetry_iHealth_Poller',
+ SYSTEM_CLASS_NAME: 'Telemetry_System',
+ SYSTEM_POLLER_CLASS_NAME: 'Telemetry_System_Poller'
+ },
CONSUMERS_DIR: './consumers',
- CONTAINER_DEVICE_TYPE: 'Container',
- CONTROLS_CLASS_NAME: 'Controls',
CONTROLS_PROPERTY_NAME: 'controls',
DAY_NAME_TO_WEEKDAY,
DEVICE_DEFAULT_PORT: 8100,
@@ -64,8 +90,11 @@ module.exports = {
DEVICE_REST_MAMD_DIR: '/var/config/rest/madm',
DEVICE_REST_MADM_URI: '/mgmt/shared/file-transfer/madm/',
DEVICE_TMP_DIR: '/shared/tmp',
+ DEVICE_TYPE: {
+ BIG_IP: 'BIG-IP',
+ CONTAINER: 'Container'
+ },
DEFAULT_EVENT_LISTENER_PORT: 6514,
- EVENT_LISTENER_CLASS_NAME: 'Telemetry_Listener',
EVENT_TYPES: {
DEFAULT: 'event',
AVR_EVENT: 'AVR',
@@ -79,20 +108,19 @@ module.exports = {
SYSTEM_POLLER: 'systemInfo',
IHEALTH_POLLER: 'ihealthInfo'
},
+ HTTP_REQUEST: {
+ DEFAULT_PORT: 80,
+ DEFAULT_PROTOCOL: 'http'
+ },
IHEALTH_API_LOGIN: 'https://api.f5.com/auth/pub/sso/login/ihealth-api',
IHEALTH_API_UPLOAD: 'https://ihealth-api.f5.com/qkview-analyzer/api/qkviews',
- IHEALTH_POLLER_CLASS_NAME: 'Telemetry_iHealth_Poller',
LOCAL_HOST: 'localhost',
PASSPHRASE_CIPHER_TEXT: 'cipherText',
PASSPHRASE_ENVIRONMENT_VAR: 'environmentVar',
PORT_TO_PROTO,
PROTO_TO_PORT,
- QKVIEW_CMD_LOCAL_TIMEOUT: 1 * 60 * 60 * 1000, // 1 hour in miliseconds
- REQUEST_DEFAULT_PORT: 80,
- REQUEST_DEFAULT_PROTOCOL: 'http',
+ QKVIEW_CMD_LOCAL_TIMEOUT: 1 * 60 * 60 * 1000, // 1 hour in milliseconds
STATS_KEY_SEP: '::',
- SYSTEM_CLASS_NAME: 'Telemetry_System',
- SYSTEM_POLLER_CLASS_NAME: 'Telemetry_System_Poller',
STRICT_TLS_REQUIRED: true,
TRACER_DIR: '/var/tmp/telemetry',
USER_AGENT: `f5-telemetry/${VERSION}`,
diff --git a/src/lib/consumers.js b/src/lib/consumers.js
index 25b8376a..7c70db7f 100644
--- a/src/lib/consumers.js
+++ b/src/lib/consumers.js
@@ -9,15 +9,15 @@
'use strict';
const path = require('path');
-const logger = require('./logger.js'); // eslint-disable-line no-unused-vars
-const deepCopy = require('./util.js').deepCopy;
-const tracers = require('./util.js').tracer;
-const constants = require('./constants.js');
-const configWorker = require('./config.js');
-const DataFilter = require('./dataFilter.js').DataFilter;
+const logger = require('./logger'); // eslint-disable-line no-unused-vars
+const deepCopy = require('./util').deepCopy;
+const tracers = require('./util').tracer;
+const constants = require('./constants');
+const configWorker = require('./config');
+const DataFilter = require('./dataFilter').DataFilter;
const CONSUMERS_DIR = constants.CONSUMERS_DIR;
-const CLASS_NAME = constants.CONSUMERS_CLASS_NAME;
+const CLASS_NAME = constants.CONFIG_CLASSES.CONSUMER_CLASS_NAME;
let CONSUMERS = null;
/**
@@ -168,8 +168,8 @@ configWorker.on('change', (config) => {
})
.then(() => {
unloadUnusedModules(typesBefore);
- tracers.remove(null, tracer => tracer.name.startsWith(CLASS_NAME)
- && tracer.lastGetTouch < tracersTimestamp);
+ tracers.remove(tracer => tracer.name.startsWith(CLASS_NAME)
+ && tracer.lastGetTouch < tracersTimestamp);
});
});
diff --git a/src/lib/consumers/Azure_Log_Analytics/index.js b/src/lib/consumers/Azure_Log_Analytics/index.js
index 0ffe6cac..b43175e5 100644
--- a/src/lib/consumers/Azure_Log_Analytics/index.js
+++ b/src/lib/consumers/Azure_Log_Analytics/index.js
@@ -10,7 +10,7 @@
const request = require('request');
const crypto = require('crypto');
-const EVENT_TYPES = require('../../constants.js').EVENT_TYPES;
+const EVENT_TYPES = require('../../constants').EVENT_TYPES;
function makeRequest(requestOptions) {
return new Promise((resolve, reject) => {
@@ -84,6 +84,8 @@ module.exports = function (context) {
// deep copy and parse body, otherwise it will be stringified again
const requestOptionsCopy = JSON.parse(JSON.stringify(requestOptions));
requestOptionsCopy.body = JSON.parse(requestOptionsCopy.body);
+ // redact secrets in Authorization header
+ requestOptionsCopy.headers.Authorization = '*****';
tracerMsg.push(requestOptionsCopy);
}
diff --git a/src/lib/consumers/ElasticSearch/index.js b/src/lib/consumers/ElasticSearch/index.js
index 06918f33..554f39a6 100644
--- a/src/lib/consumers/ElasticSearch/index.js
+++ b/src/lib/consumers/ElasticSearch/index.js
@@ -8,9 +8,9 @@
'use strict';
-const ESClient = require('elasticsearch').Client;
-const util = require('../../util.js');
-const EVENT_TYPES = require('../../constants.js').EVENT_TYPES;
+const elasticsearch = require('elasticsearch');
+const util = require('../../util');
+const EVENT_TYPES = require('../../constants').EVENT_TYPES;
function elasticLogger(logger, tracer) {
@@ -50,7 +50,7 @@ module.exports = function (context) {
path: config.path
},
ssl: {
- rejectUnauthorized: config.allowSelfSignedCert
+ rejectUnauthorized: !config.allowSelfSignedCert
}
};
if (config.username) {
@@ -80,7 +80,7 @@ module.exports = function (context) {
if (context.tracer) {
context.tracer.write(JSON.stringify(payload, null, 4));
}
- const client = new ESClient(clientConfig);
+ const client = new elasticsearch.Client(clientConfig);
client.index(payload)
.then(() => {
context.logger.debug('success');
diff --git a/src/lib/consumers/Generic_HTTP/index.js b/src/lib/consumers/Generic_HTTP/index.js
index 368e0426..b64e0233 100644
--- a/src/lib/consumers/Generic_HTTP/index.js
+++ b/src/lib/consumers/Generic_HTTP/index.js
@@ -43,8 +43,15 @@ module.exports = function (context) {
strictSSL: !context.config.allowSelfSignedCert
};
if (context.tracer) {
+ let tracedHeaders = httpHeaders;
+ // redact Basic Auth passphrase, if provided
+ if (Object.keys(httpHeaders).indexOf('Authorization') > -1) {
+ tracedHeaders = JSON.parse(JSON.stringify(httpHeaders));
+ tracedHeaders.Authorization = '*****';
+ }
+
context.tracer.write(JSON.stringify({
- method, url, headers: httpHeaders, body: JSON.parse(httpBody)
+ method, url, headers: tracedHeaders, body: JSON.parse(httpBody)
}, null, 4));
}
diff --git a/src/lib/consumers/README.md b/src/lib/consumers/README.md
index d551aa4a..54ab0130 100644
--- a/src/lib/consumers/README.md
+++ b/src/lib/consumers/README.md
@@ -42,7 +42,7 @@ This describes the structure of the context object.
Creating and testing new consumers within TS itself by posting expected declaration and watching logs, etc. is an entirely valid way to add a new consumer. However getting the index.js file right initially might require some iteration, and ideally this can be locally. Below is an example script to call the consumer with a mock event.
```javascript
-const index = require('./index.js');
+const index = require('./index');
const mockLogger = {
debug: msg => console.log(msg),
diff --git a/src/lib/consumers/Splunk/dataMapping.js b/src/lib/consumers/Splunk/dataMapping.js
index f6bb76a6..715d6c71 100644
--- a/src/lib/consumers/Splunk/dataMapping.js
+++ b/src/lib/consumers/Splunk/dataMapping.js
@@ -8,6 +8,11 @@
'use strict';
+const TMSTATS_PERIOD_PREFIX = RegExp(/\./g);
+const IPV4_REGEXP = /FFFF([A-Fa-f0-9].)([A-Fa-f0-9].)([A-Fa-f0-9].)([A-Fa-f0-9].)/;
+const IPV6_REGEXP = /([A-Fa-f0-9]{4})([A-Fa-f0-9]{4})([A-Fa-f0-9]{4})([A-Fa-f0-9]{4})([A-Fa-f0-9]{4})([A-Fa-f0-9]{4})([A-Fa-f0-9]{4})([A-Fa-f0-9]{4})/;
+const IPV6_V4_PREFIX_REGEXP = RegExp(/::ffff:/ig);
+
// Canonical format
function defaultFormat(globalCtx) {
const data = globalCtx.event.data;
@@ -48,6 +53,8 @@ const SOURCE_2_TYPES = {
'bigip.objectmodel.cert': 'f5:bigip:config:iapp:json',
'bigip.objectmodel.profile': 'f5:bigip:config:iapp:json',
'bigip.objectmodel.virtual': 'f5:bigip:config:iapp:json',
+ 'bigip.objectmodel.virtual.pools': 'f5:bigip:config:iapp:json',
+ 'bigip.objectmodel.virtual.profiles': 'f5:bigip:config:iapp:json',
'bigip.ihealth.diagnostics': 'f5:bigip:ihealth:iapp:json',
'bigip.tmstats': 'f5:bigip:stats:iapp.json'
};
@@ -134,6 +141,19 @@ function getData(request, key) {
return data;
}
+function formatHexIP(originData) {
+ const data = originData.replace(/:/g, '').substring(0, 32);
+ const matchIpV4 = data.match(IPV4_REGEXP);
+ if (matchIpV4) {
+ return `${parseInt(matchIpV4[1], 16)}.${parseInt(matchIpV4[2], 16)}.${parseInt(matchIpV4[3], 16)}.${parseInt(matchIpV4[4], 16)}`;
+ }
+ const matchIpV6 = data.match(IPV6_REGEXP);
+ if (matchIpV6) {
+ return `${matchIpV6[1]}:${matchIpV6[2]}:${matchIpV6[3]}:${matchIpV6[4]}:${matchIpV6[5]}:${matchIpV6[6]}:${matchIpV6[7]}:${matchIpV6[8]}`;
+ }
+ return originData;
+}
+
function overall(request) {
const data = request.globalCtx.event.data;
const template = getTemplate('bigip.stats.summary', data, request.cache);
@@ -306,6 +326,57 @@ const stats = [
});
},
+ function (request) {
+ const vsStats = getData(request, 'virtualServers');
+ if (vsStats === undefined) return undefined;
+
+ const template = getTemplate('bigip.objectmodel.virtual.profiles', request.globalCtx.event.data, request.cache);
+ const ret = [];
+ Object.keys(vsStats).forEach((vsKey) => {
+ const vsStat = vsStats[vsKey];
+ if (!vsStat.profiles) {
+ return;
+ }
+ const profiles = vsStat.profiles;
+ Object.keys(profiles).forEach((profKey) => {
+ const newData = Object.assign({}, template);
+ newData.event = Object.assign({}, template.event);
+ newData.event.virtual_name = vsKey;
+ newData.event.tenant = vsStat.tenant;
+ newData.event.app = vsStat.application;
+ newData.event.appComponent = '';
+ newData.event.profile_name = profKey;
+ newData.event.profile_type = 'profile';
+ ret.push(newData);
+ });
+ });
+ return ret;
+ },
+
+ function (request) {
+ const vsStats = getData(request, 'virtualServers');
+ if (vsStats === undefined) return undefined;
+
+ const template = getTemplate('bigip.objectmodel.virtual.pools', request.globalCtx.event.data, request.cache);
+ const ret = [];
+ Object.keys(vsStats).forEach((key) => {
+ const vsStat = vsStats[key];
+ if (!vsStat.pool) {
+ return;
+ }
+
+ const newData = Object.assign({}, template);
+ newData.event = Object.assign({}, template.event);
+ newData.event.virtual_name = key;
+ newData.event.app = vsStat.application;
+ newData.event.appComponent = '';
+ newData.event.tenant = vsStat.tenant;
+ newData.event.pool_name = vsStat.pool;
+ ret.push(newData);
+ });
+ return ret;
+ },
+
function (request) {
const poolStats = getData(request, 'pools');
if (poolStats === undefined) return undefined;
@@ -338,23 +409,52 @@ const stats = [
const tmstats = getData(request, 'tmstats');
if (tmstats === undefined) return undefined;
+ const hexIpProps = ['addr', 'source', 'destination'];
const template = getTemplate('bigip.tmstats', request.globalCtx.event.data, request.cache);
const output = [];
- const periodRegex = RegExp(/\./g);
Object.keys(tmstats).forEach((key) => {
+ if (key === 'virtualServerCpuStat') {
+ const tmstData = tmstats[key];
+ // 1) table 'virtualServerStat' should exist
+ // 2) last_cycle_count was removed starting from 13.1+
+ if (tmstats.virtualServerStat && tmstData && tmstData.length
+ && typeof tmstData[0].last_cycle_count === 'undefined') {
+ const vsCycleCount = {};
+ tmstats.virtualServerStat.forEach((entry) => {
+ vsCycleCount[entry.name] = entry.cycle_count;
+ });
+ tmstData.forEach((entry) => {
+ entry.last_cycle_count = vsCycleCount[entry.name];
+ });
+ }
+ }
+
tmstats[key].forEach((entry) => {
const newData = Object.assign({}, template);
// replace periods in tmstat key names with underscores
Object.keys(entry).forEach((entryKey) => {
- if (periodRegex.test(entryKey)) {
- entry[entryKey.replace(periodRegex, '_')] = entry[entryKey];
+ if (TMSTATS_PERIOD_PREFIX.test(entryKey)) {
+ entry[entryKey.replace(TMSTATS_PERIOD_PREFIX, '_')] = entry[entryKey];
delete entry[entryKey];
}
});
newData.source += `.${STAT_2_TMCTL_TABLE[key]}`;
newData.event = Object.assign({}, template.event);
newData.event = Object.assign(newData.event, entry);
+ newData.event.app = newData.event.application;
+ // newData.event.tenant = newData.event.tenant; // just a reminder that tenant is already there
+ newData.event.appComponent = '';
+
+ if (key === 'monitorInstanceStat' && newData.event.ip_address) {
+ newData.event.ip_address = newData.event.ip_address.replace(IPV6_V4_PREFIX_REGEXP, '');
+ }
+ hexIpProps.forEach((hexProp) => {
+ if (newData.event[hexProp]) {
+ newData.event[hexProp] = formatHexIP(newData.event[hexProp]);
+ }
+ });
+
output.push(newData);
});
});
diff --git a/src/lib/consumers/Splunk/index.js b/src/lib/consumers/Splunk/index.js
index 38af1157..de8b1a9b 100644
--- a/src/lib/consumers/Splunk/index.js
+++ b/src/lib/consumers/Splunk/index.js
@@ -11,8 +11,8 @@
const request = require('request');
const zlib = require('zlib');
-const dataMapping = require('./dataMapping.js');
-const EVENT_TYPES = require('../../constants.js').EVENT_TYPES;
+const dataMapping = require('./dataMapping');
+const EVENT_TYPES = require('../../constants').EVENT_TYPES;
const GZIP_DATA = true;
const MAX_CHUNK_SIZE = 99000;
@@ -230,10 +230,18 @@ function forwardData(dataToSend, globalCtx) {
context.request = request.defaults(context.requestOpts);
if (globalCtx.tracer) {
+ // redact passphrase in consumer config
+ const tracedConsumerCtx = JSON.parse(JSON.stringify(context.consumer));
+ tracedConsumerCtx.passphrase = '*****';
+
+ // redact passphrase in request options
+ const traceRequestOpts = JSON.parse(JSON.stringify(context.requestOpts));
+ traceRequestOpts.headers.Authorization = '*****';
+
globalCtx.tracer.write(JSON.stringify({
dataToSend,
- consumer: context.consumer,
- requestOpts: context.requestOpts
+ consumer: tracedConsumerCtx,
+ requestOpts: traceRequestOpts
}, null, 2));
}
diff --git a/src/lib/consumers/Statsd/index.js b/src/lib/consumers/Statsd/index.js
index 92838051..49187238 100644
--- a/src/lib/consumers/Statsd/index.js
+++ b/src/lib/consumers/Statsd/index.js
@@ -15,7 +15,7 @@
// udp/tcp it might be simpler to just implement net module directly for this use case
const StatsD = require('node-statsd');
const deepDiff = require('deep-diff');
-const EVENT_TYPES = require('../../constants.js').EVENT_TYPES;
+const EVENT_TYPES = require('../../constants').EVENT_TYPES;
const stripMetrics = (data) => {
Object.keys(data).forEach((item) => {
diff --git a/src/lib/consumers/Sumo_Logic/index.js b/src/lib/consumers/Sumo_Logic/index.js
index 8ebf1fa5..6bb709b8 100644
--- a/src/lib/consumers/Sumo_Logic/index.js
+++ b/src/lib/consumers/Sumo_Logic/index.js
@@ -32,7 +32,14 @@ module.exports = function (context) {
strictSSL: !config.allowSelfSignedCert
};
if (context.tracer) {
- context.tracer.write(JSON.stringify({ url, headers: httpHeaders, body: JSON.parse(httpBody) }, null, 4));
+ // redact secret from url
+ const tracedUrl = (secret === '' ? url : url
+ .split('/')
+ .slice(0, -1)
+ .join('/')
+ .concat('/*****'));
+ const traceData = { url: tracedUrl, headers: httpHeaders, body: JSON.parse(httpBody) };
+ context.tracer.write(JSON.stringify(traceData, null, 4));
}
// eslint-disable-next-line no-unused-vars
diff --git a/src/lib/customKeywords.js b/src/lib/customKeywords.js
index 462375a5..a60741a6 100644
--- a/src/lib/customKeywords.js
+++ b/src/lib/customKeywords.js
@@ -10,13 +10,14 @@
const Ajv = require('ajv');
const fs = require('fs');
-const constants = require('./constants.js');
-const util = require('./util.js');
-const deviceUtil = require('./deviceUtil.js');
+const constants = require('./constants');
+const util = require('./util');
+const deviceUtil = require('./deviceUtil');
const textNamedKey = 'plainText';
const base64NamedKey = 'plainBase64';
const secureVaultNamedKey = 'SecureVault';
+const secureVaultCipherPrefix = '$M$';
/**
@@ -176,6 +177,61 @@ function expandPointers(str, origin, srcPointer) {
return ret;
}
+/**
+ * Validate path
+ *
+ * @param {Object} origin - origin object
+ * @param {String} srcPath - path to follow
+ * @param {Object} options - options
+ * @param {String} options.path - base path that starts with class name
+ * @param {Integer} options.partsNum - number of parts the value should consist of. 0 - no limits
+ */
+function validateDeclarationPath(origin, srcPath, options) {
+ // Given sample obj
+ // {
+ // class: "The_Class",
+ // collProp: { { key1: val1 }, { key2: val2 } }
+ // }
+
+ // remove leading and trailing '/'
+ const trimPath = val => val.substring(
+ val.startsWith('/') ? 1 : 0,
+ val.endsWith('/') ? (val.length - 1) : val.length
+ );
+
+ // the path defined by the user in their declaration, e.g. The_Class/key_1/.../key_n
+ const dataParts = trimPath(srcPath).split('/');
+ if (options.partsNum && dataParts.length !== options.partsNum) {
+ let exampleFormat = 'ObjectName';
+ if (options.partsNum) {
+ for (let i = 1; i < options.partsNum; i += 1) {
+ exampleFormat = `${exampleFormat}/key${i}`;
+ }
+ } else {
+ exampleFormat = `${exampleFormat}/key1/.../keyN`;
+ }
+ throw new Error(`"${srcPath}" does not follow format "${exampleFormat}"`);
+ }
+
+ // the path defined in the schema {class}/{propLevel_1}/../{propLevel_n}, e.g. The_Class/collProp
+ const schemaParts = trimPath(options.path).split('/');
+ const className = schemaParts[0];
+ const classInstanceName = dataParts[0];
+ let objInstance = origin[classInstanceName];
+
+ if (typeof objInstance !== 'object' || objInstance.class !== className) {
+ throw new Error(`"${classInstanceName}" must be of object type and class "${className}"`);
+ }
+
+ const pathParts = schemaParts.slice(1).concat(dataParts.slice(1));
+ /* eslint-disable no-return-assign */
+ if (!pathParts.every(key => typeof (objInstance = objInstance[key]) !== 'undefined')) {
+ const resolvedPath = `${classInstanceName}/${pathParts.join('/')}`;
+ throw new Error(`Unable to find "${resolvedPath}"`);
+ }
+}
+
+
const keywords = {
f5secret: {
type: 'object',
@@ -189,57 +245,47 @@ const keywords = {
compile(schema, parentSchema) {
// eslint-disable-next-line no-unused-vars
return function (data, dataPath, parentData, propertyName, rootData) {
- const ajvErrors = [];
-
- // we handle a number of passphrase object in this function, the following describes each of them
- // 'cipherText': this means we plan to store a plain text secret locally, which requires we encrypt
- // it first. This also assumes that we are running on a BIG-IP where we have the means to do so
- // 'environmentVar': undefined
-
- // handle 'environmentVar' passphrase object
- if (data[constants.PASSPHRASE_ENVIRONMENT_VAR] !== undefined) {
- return Promise.resolve(true);
- }
- // handle 'cipherText' passphrase object
- if (data[constants.PASSPHRASE_CIPHER_TEXT] !== undefined) {
- // if data is already encrypted just return
- if (data.protected === secureVaultNamedKey) {
- return Promise.resolve(true);
- }
-
- // base64 decode before encrypting - if needed
- if (data.protected === base64NamedKey) {
- data[constants.PASSPHRASE_CIPHER_TEXT] = util.base64('decode', data[constants.PASSPHRASE_CIPHER_TEXT]);
- data.protected = textNamedKey;
- }
-
- return deviceUtil.getDeviceType()
- .then((deviceType) => {
- // check if on a BIG-IP and fail validation if not
- if (deviceType !== constants.BIG_IP_DEVICE_TYPE) {
- throw new Error(`Specifying '${constants.PASSPHRASE_CIPHER_TEXT}' requires running on ${constants.BIG_IP_DEVICE_TYPE}`);
+ return Promise.resolve()
+ .then(() => {
+ /**
+ * we handle a number of passphrase objects in this function,
+ * the following describes each of them:
+ * - 'cipherText': this means we plan to store a plain text secret locally,
+ * which requires we encrypt it first. This also assumes that we are
+ * running on a BIG-IP where we have the means to do so.
+ * - 'environmentVar': undefined
+ */
+ if (typeof data[constants.PASSPHRASE_ENVIRONMENT_VAR] !== 'undefined') {
+ return Promise.resolve(true);
+ }
+ if (typeof data[constants.PASSPHRASE_CIPHER_TEXT] === 'undefined') {
+ return Promise.reject(new Error(`missing ${constants.PASSPHRASE_CIPHER_TEXT} or ${constants.PASSPHRASE_ENVIRONMENT_VAR}`));
+ }
+ if (data.protected === secureVaultNamedKey) {
+ if (data[constants.PASSPHRASE_CIPHER_TEXT].startsWith(secureVaultCipherPrefix)) {
+ return Promise.resolve(true);
}
- // encrypt secret
- return deviceUtil.encryptSecret(data[constants.PASSPHRASE_CIPHER_TEXT]);
- })
- .then((secret) => {
- // update text field with secret - should we base64 encode?
- data[constants.PASSPHRASE_CIPHER_TEXT] = secret;
- // set protected key - in case we return validated schema to requestor
- data.protected = secureVaultNamedKey;
-
- // notify success
- return true;
- })
- .catch((e) => {
- ajvErrors.push({ keyword: 'f5secret', message: e.message, params: {} });
- throw new Ajv.ValidationError(ajvErrors);
- });
- }
+ return Promise.reject(new Error(`'${constants.PASSPHRASE_CIPHER_TEXT}' should be encrypted by ${constants.DEVICE_TYPE.BIG_IP} when 'protected' is '${secureVaultNamedKey}'`));
+ }
+ if (data.protected === base64NamedKey) {
+ data[constants.PASSPHRASE_CIPHER_TEXT] = util.base64('decode', data[constants.PASSPHRASE_CIPHER_TEXT]);
+ data.protected = textNamedKey;
+ }
- // if we make it here we should reject with a useful message
- const message = `missing ${constants.PASSPHRASE_CIPHER_TEXT} or ${constants.PASSPHRASE_ENVIRONMENT_VAR}`;
- return Promise.reject(new Ajv.ValidationError([{ keyword: 'f5secret', message, params: {} }]));
+ return deviceUtil.getDeviceType()
+ .then((deviceType) => {
+ if (deviceType !== constants.DEVICE_TYPE.BIG_IP) {
+ return Promise.reject(new Error(`Specifying '${constants.PASSPHRASE_CIPHER_TEXT}' requires running on ${constants.DEVICE_TYPE.BIG_IP}`));
+ }
+ return deviceUtil.encryptSecret(data[constants.PASSPHRASE_CIPHER_TEXT]);
+ })
+ .then((secret) => {
+ data[constants.PASSPHRASE_CIPHER_TEXT] = secret;
+ data.protected = secureVaultNamedKey;
+ return true;
+ });
+ })
+ .catch(e => Promise.reject(new Ajv.ValidationError([{ keyword: 'f5secret', message: e.message || e.toString(), params: {} }])));
};
}
},
@@ -329,20 +375,57 @@ const keywords = {
compile(schema, parentSchema) {
// eslint-disable-next-line no-unused-vars
return function (data, dataPath, parentData, propertyName, rootData) {
- const ajvErrors = [];
- // string passed
if (typeof data === 'string') {
const declarationClass = schema;
- if (!(rootData[data] && rootData[data].class === declarationClass)) {
- ajvErrors.push({
- keyword: 'ihealth',
+ const objectInstance = rootData[data];
+ if (typeof objectInstance !== 'object' || objectInstance.class !== declarationClass) {
+ return Promise.reject(new Ajv.ValidationError([{
+ keyword: 'declarationClass',
message: `declaration with name "${data}" and class "${declarationClass}" doesn't exist`,
params: {}
- });
+ }]));
}
}
- if (ajvErrors.length) {
- return Promise.reject(new Ajv.ValidationError(ajvErrors));
+ return Promise.resolve(true);
+ };
+ }
+ },
+ declarationClassProp: {
+ type: 'string',
+ errors: true,
+ modifying: true,
+ async: true,
+ metaSchema: {
+ type: 'object',
+ description: 'Automatically resolve a path with given {declarationClass}/{propLevel_1}/...{propLevel_n}',
+ properties: {
+ partsNum: {
+ description: 'Expected number of parts the value should consist of. 0 - no limits',
+ type: 'integer',
+ minimum: 0,
+ maximum: 100,
+ default: 0
+ },
+ path: {
+ description: '{declarationClass}/{propLevel_1}/...{propLevel_n}',
+ type: 'string',
+ minLength: 1
+ }
+ }
+ },
+ // eslint-disable-next-line no-unused-vars
+ compile(schema, parentSchema) {
+ return function (data, dataPath, parentData, propertyName, rootData) {
+ if (typeof data === 'string') {
+ try {
+ validateDeclarationPath(rootData, data, schema);
+ } catch (err) {
+ return Promise.reject(new Ajv.ValidationError([{
+ keyword: 'declarationClassProp',
+ message: `${err}`,
+ params: {}
+ }]));
+ }
}
return Promise.resolve(true);
};
@@ -360,7 +443,7 @@ const keywords = {
// eslint-disable-next-line no-unused-vars
validate(schema, data, parentSchema, dataPath, parentData, propertyName, rootData) {
// looks like instance is configured as default
- if (data) {
+ if (typeof data === 'string') {
return new Promise((resolve, reject) => {
fs.access(data, (fs.constants || fs).R_OK, (accessErr) => {
if (accessErr) {
diff --git a/src/lib/dataFilter.js b/src/lib/dataFilter.js
index 75fba210..40378228 100644
--- a/src/lib/dataFilter.js
+++ b/src/lib/dataFilter.js
@@ -8,8 +8,8 @@
'use strict';
-const util = require('./util.js');
-const dataUtil = require('./dataUtil.js');
+const util = require('./util');
+const dataUtil = require('./dataUtil');
/**
* Data Filter Class
@@ -91,13 +91,14 @@ DataFilter.prototype._applyBlacklist = function (data) {
* @param {Object} [actionCtx.excludeData] - 'Exclude' filter definition
* @param {Object} [actionCtx.locations] - The locations of data to be filtered
* @param {Object} [actionCtx.ifAllMatch] - conditions to check before
+ * @param {Object} [actionCtx.ifAnyMatch] - conditions to check before
*
* @returns {void}
*/
function handleAction(dataCtx, actionCtx) {
if ((actionCtx.includeData || actionCtx.excludeData)
- && (util.isObjectEmpty(actionCtx.ifAllMatch)
- || dataUtil.checkConditions(dataCtx.data, actionCtx.ifAllMatch))) {
+ && !util.isObjectEmpty(dataCtx.data) // completely short-circuit if dataCtx.data is empty
+ && dataUtil.checkConditions(dataCtx, actionCtx)) {
if (actionCtx.includeData) {
dataUtil.preserveStrictMatches(dataCtx.data, actionCtx.locations, true);
} else if (actionCtx.excludeData) {
diff --git a/src/lib/dataPipeline.js b/src/lib/dataPipeline.js
index 94f936f4..58211f2f 100644
--- a/src/lib/dataPipeline.js
+++ b/src/lib/dataPipeline.js
@@ -8,12 +8,12 @@
'use strict';
-const logger = require('./logger.js');
-const forwarder = require('./forwarder.js');
-const dataTagging = require('./dataTagging.js');
-const dataFilter = require('./dataFilter.js');
-const util = require('./util.js');
-const EVENT_TYPES = require('./constants.js').EVENT_TYPES;
+const logger = require('./logger');
+const forwarder = require('./forwarder');
+const dataTagging = require('./dataTagging');
+const dataFilter = require('./dataFilter');
+const util = require('./util');
+const EVENT_TYPES = require('./constants').EVENT_TYPES;
/**
diff --git a/src/lib/dataTagging.js b/src/lib/dataTagging.js
index 2182d131..a1230f89 100644
--- a/src/lib/dataTagging.js
+++ b/src/lib/dataTagging.js
@@ -1,5 +1,5 @@
/*
- * Copyright 2018. F5 Networks, Inc. See End User License Agreement ("EULA") for
+ * Copyright 2020. F5 Networks, Inc. See End User License Agreement ("EULA") for
* license terms. Notwithstanding anything to the contrary in the EULA, Licensee
* may copy and modify this software product for its internal business purposes.
* Further, Licensee may upload, publish and distribute the modified version of
@@ -10,10 +10,10 @@
const properties = require('./properties.json');
const normalizeUtil = require('./normalizeUtil');
-const dataUtil = require('./dataUtil.js');
-const util = require('./util.js');
+const dataUtil = require('./dataUtil');
+const util = require('./util');
const systemStatsUtil = require('./systemStatsUtil');
-const EVENT_TYPES = require('./constants.js').EVENT_TYPES;
+const EVENT_TYPES = require('./constants').EVENT_TYPES;
/**
* Handle tagging actions on the data.
@@ -27,16 +27,17 @@ const EVENT_TYPES = require('./constants.js').EVENT_TYPES;
* @param {Object} dataCtx.data - data to process
* @param {String} dataCtx.type - type of data to process
* @param {Object} actionCtx - 'setTag' action to perfrom on the data
+ * @param {Object} deviceCtx - device context
* @param {Object} [actionCtx.setTag] - tag(s) that will be applied
* @param {Object} [actionCtx.locations] - where thae tags should be applied
* @param {Object} [actionCtx.ifAllMatch] - conditions to check before
+ * @param {Object} [actionCtx.ifAnyMatch] - conditions to check before
*
* @returns {void}
*/
function handleAction(dataCtx, actionCtx, deviceCtx) {
if (!util.isObjectEmpty(actionCtx.setTag)
- && (util.isObjectEmpty(actionCtx.ifAllMatch)
- || dataUtil.checkConditions(dataCtx.data, actionCtx.ifAllMatch))) {
+ && dataUtil.checkConditions(dataCtx, actionCtx)) {
addTags(dataCtx, actionCtx, deviceCtx);
}
}
@@ -67,22 +68,28 @@ function addTags(dataCtx, actionCtx, deviceCtx) {
// properties.json - like old-style tagging
if (dataCtx.type === EVENT_TYPES.SYSTEM_POLLER) {
// Apply tags to default locations (where addKeysByTag is true) for system info
- Object.keys(properties.stats).forEach((statKey) => {
- const items = data[statKey];
- const statProp = systemStatsUtil.renderProperty(deviceCtx, properties.stats[statKey]);
- // tags can be applied to objects only - usually it is collections of objects
- // e.g. Virtual Servers, pools, profiles and etc.
- if (typeof items === 'object'
- && !util.isObjectEmpty(items)
- && statProp.normalization
- && statProp.normalization.find(norm => norm.addKeysByTag)) {
- Object.keys(items).forEach((itemKey) => {
- Object.keys(tags).forEach((tagKey) => {
- addTag(items[itemKey], tagKey, tags[tagKey], itemKey, statProp);
+ if (!dataCtx.isCustom) {
+ Object.keys(properties.stats).forEach((statKey) => {
+ const statProp = systemStatsUtil.renderProperty(
+ deviceCtx, util.deepCopy(properties.stats[statKey])
+ );
+ const items = statProp.structure && statProp.structure.parentKey
+ ? (data[statProp.structure.parentKey] || {})[statKey] : data[statKey];
+
+ // tags can be applied to objects only - usually it is collections of objects
+ // e.g. Virtual Servers, pools, profiles and etc.
+ if (typeof items === 'object'
+ && !util.isObjectEmpty(items)
+ && statProp.normalization
+ && statProp.normalization.find(norm => norm.addKeysByTag)) {
+ Object.keys(items).forEach((itemKey) => {
+ Object.keys(tags).forEach((tagKey) => {
+ addTag(items[itemKey], tagKey, tags[tagKey], itemKey, statProp);
+ });
});
- });
- }
- });
+ }
+ });
+ }
} else {
// Apply tags to default locations of events (and not iHealth data)
Object.keys(tags).forEach((tagKey) => {
diff --git a/src/lib/dataUtil.js b/src/lib/dataUtil.js
index 4106ae63..a8c4b1f1 100644
--- a/src/lib/dataUtil.js
+++ b/src/lib/dataUtil.js
@@ -8,39 +8,108 @@
'use strict';
-const logger = require('./logger.js');
+const logger = require('./logger');
+const util = require('./util');
/**
- * Checks the conditions against the data
+ * Checks the conditions against the data.
*
* @private - use for testing only
*
- * @param {Object} data - data to process
- * @param {Object} conditions - conditions to check
+ * @param {Object} dataCtx - complete data context collected from BIG-IP
+ * @param {Object} actionCtx - individual action context
*
- * @returns {Boolean} true when all conditions are met
+ * @returns {Boolean} True if a condition is met, or if a matching function is not provided.
*/
-function checkConditions(data, conditions) {
- // Array.prototype.every will stop on first 'false'
- return Object.keys(conditions).every((condition) => {
- const matches = getMatches(data, condition);
- const conditionVal = conditions[condition];
+function checkConditions(dataCtx, actionCtx) {
+ let func;
+ let condition;
+
+ if (!util.isObjectEmpty(actionCtx.ifAnyMatch)) {
+ func = checkAnyMatches;
+ condition = actionCtx.ifAnyMatch;
+ }
+ if (!util.isObjectEmpty(actionCtx.ifAllMatch)) {
+ func = checkAllMatches;
+ condition = actionCtx.ifAllMatch;
+ }
+ if (func) {
+ return util.isObjectEmpty(dataCtx.data) ? false : func(dataCtx.data, condition);
+ }
+ return true;
+}
+
+function checkAnyMatches(data, matchObjects) {
+ // Use 'Array.prototype.some' to check whether at least 1 condition is true
+ return matchObjects.some(conditions => checkAllMatches(data, conditions));
+}
+
+function checkAllMatches(data, conditions) {
+ // Use 'Array.prototype.every' to check whether every condition is true
+ return Object.keys(conditions).every((conditionKey) => {
+ const matches = getMatches(data, conditionKey);
if (matches.length === 0) {
- // No matches were found so the condition was not met
- logger.debug(`No matches were found for ${condition}`);
+ logger.debug(`No matches were found for "${conditionKey}"`);
return false;
}
- if (typeof conditionVal !== 'object') {
- // The condition to check is not an object and matches have been found in the data
- return matches.every(match => data[match] === conditionVal
- || data[match].toString().match(conditionVal.toString()));
+
+ const conditionVals = conditions[conditionKey];
+
+ if (typeof conditionVals !== 'object') {
+ return matches.every(match => checkScalarValue(data[match], conditionVals));
+ }
+
+ if (Array.isArray(conditionVals)) {
+ return matches.every((match) => {
+ const dataMatch = data[match];
+ // If conditionVals is array, dataMatch must also be an array for matching to occur.
+ if (!Array.isArray(dataMatch) || dataMatch.length !== conditionVals.length) {
+ return false;
+ }
+
+ // Arrays have no order guarantee; sort both, then compare each array item
+ dataMatch.sort();
+ conditionVals.sort();
+ return conditionVals.every((conditionVal, index) => checkScalarValue(dataMatch[index], conditionVal));
+ });
}
- // The next condition is an object so we do recursion and matches for the key have been found
- return matches.every(match => checkConditions(data[match], conditionVal));
+
+ // the next condition is an object (and not array); do recursion and matches for the key have been found
+ return matches.every(match => checkAllMatches(data[match], conditionVals));
});
}
+function checkScalarValue(data, condition) {
+ // Perform easiest strict equality (===) comparison (before type conversion or regex)
+ // Perform this check first, since we may have null === null, which should evaluate true
+ if (data === condition) {
+ return true;
+ }
+ // Function only performs matching on scalar values against scalar values
+ if (typeof data === 'object' || typeof condition === 'object') {
+ return false;
+ }
+
+ // 'data' and 'condition' are simple scalars - but not strictly equal (different type, or requires regex)
+ try {
+ // Force each to be strings for later comparison
+ condition = condition.toString();
+ data = data.toString();
+
+ // Perform another strict equality (which will be more performant than a regex match),
+ // with types converted to strings, before attempting regex match.
+ return data === condition || data.match(condition);
+ } catch (err) {
+ // Possible to have invalid regex - catch and log error. Return false. Matching unsuccessful.
+ if (err instanceof SyntaxError) {
+ logger.exception(`checkScalarValue error (data = "${data}" condition = "${condition}"): ${err.message || err}`, err);
+ return false;
+ }
+ throw err;
+ }
+}
+
/**
* Check to see if a property can be found inside the data. Checks by using property as a literal string and
* then checks as a regular expression if no results are found from the literal string search.
@@ -123,19 +192,23 @@ function getDeepMatches(data, matchObj) {
* @returns {void}
*/
function searchAnyMatches(data, matchObj, cb) {
- Object.keys(matchObj).forEach((matchKey) => {
- getMatches(data, matchKey).forEach((key) => {
- let item = data[key];
- const nextMatchObj = matchObj[matchKey];
- const nestedKey = cb(key, item);
- if (nestedKey) {
- item = item[nestedKey];
- }
- if (typeof item === 'object' && typeof nextMatchObj === 'object') {
- searchAnyMatches(item, nextMatchObj, cb);
- }
+ if (Array.isArray(matchObj)) {
+ matchObj.forEach(matchItem => searchAnyMatches(data, matchItem, cb));
+ } else {
+ Object.keys(matchObj).forEach((matchKey) => {
+ getMatches(data, matchKey).forEach((key) => {
+ let item = data[key];
+ const nextMatchObj = matchObj[matchKey];
+ const nestedKey = cb(key, item);
+ if (nestedKey) {
+ item = item[nestedKey];
+ }
+ if (typeof item === 'object' && typeof nextMatchObj === 'object') {
+ searchAnyMatches(item, nextMatchObj, cb);
+ }
+ });
});
- });
+ }
}
/**
diff --git a/src/lib/datetimeUtil.js b/src/lib/datetimeUtil.js
index 84e6ebdd..311b1a3d 100644
--- a/src/lib/datetimeUtil.js
+++ b/src/lib/datetimeUtil.js
@@ -8,8 +8,8 @@
'use strict';
-const constants = require('./constants.js');
-const util = require('./util.js');
+const constants = require('./constants');
+const util = require('./util');
/**
diff --git a/src/lib/deviceUtil.js b/src/lib/deviceUtil.js
index 7d8a665a..a2995026 100644
--- a/src/lib/deviceUtil.js
+++ b/src/lib/deviceUtil.js
@@ -13,11 +13,26 @@ const fs = require('fs');
const crypto = require('crypto');
const diff = require('deep-diff');
-const constants = require('./constants.js');
-const logger = require('./logger.js');
-const util = require('./util.js');
+const constants = require('./constants');
+const logger = require('./logger');
+const util = require('./util');
+/**
+ * Cache for info about the device TS is running on
+ *
+ * @property {String} TYPE - device's type - BIG-IP or Container
+ * @property {Object} VERSION - version information
+ * @property {Boolean} RETRIEVE_SECRETS_FROM_TMSH - true when device is affected by bug and you should run
+ * TMSH command to retrieve secret (BZ745423)
+ */
+const HOST_DEVICE_CACHE = {};
+const HDC_KEYS = {
+ TYPE: 'TYPE',
+ VERSION: 'VERSION',
+ RETRIEVE_SECRETS_FROM_TMSH: 'RETRIEVE_SECRETS_FROM_TMSH'
+};
+
/**
* F5 Device async CLI class definition starts here
*/
@@ -427,13 +442,14 @@ DeviceAsyncCLI.prototype._removeAsyncTaskFromDevice = function (taskID, errOk) {
* Helper function for the encryptSecret function
*
* @private
- * @param {Array} splitData - the secret that has been split up
- * @param {Array} dataArray - the array that the encrypted data will be put
- * @param {Integer} index - the endex value used to go through the split data
+ * @param {Array} splitData - the secret that has been split up
+ * @param {Array} dataArray - the array that the encrypted data will be put
+ * @param {Integer} index - the index value used to go through the split data
+ * @param {Boolean} secretsFromTMSH - fetch secrets from TMSH
*
* @returns {Promise} Promise resolved with the encrypted data
*/
-function encryptSecretHelper(splitData, dataArray, index) {
+function encryptSecretHelper(splitData, dataArray, index, secretsFromTMSH) {
let encryptedData = null;
let error = null;
// can't have a + or / in the radius object name, so replace those if they exist
@@ -459,70 +475,139 @@ function encryptSecretHelper(splitData, dataArray, index) {
}
// update text field with Secure Vault cryptogram - should we base64 encode?
encryptedData = res.secret;
- return module.exports.getDeviceVersion(constants.LOCAL_HOST);
- })
- .then((deviceVersion) => {
- let promise;
- if (util.compareVersionStrings(deviceVersion.version, '>=', '14.1')
- && util.compareVersionStrings(deviceVersion.version, '<', '15.0')) {
- // TMOS 14.1.x fix for 745423
- const tmshCmd = `tmsh -a list auth radius-server ${radiusObjectName} secret`;
- promise = module.exports.executeShellCommandOnDevice(constants.LOCAL_HOST, tmshCmd)
- .then((res) => {
- /**
- * auth radius-server telemetry_delete_me {
- * secret
- * }
- */
- encryptedData = res.split('\n')[1].trim().split(' ', 2)[1];
- });
+
+ if (!secretsFromTMSH) {
+ return Promise.resolve();
}
- return promise || Promise.resolve();
+
+ // TMOS 14.1.x fix for 745423
+ const tmshCmd = `tmsh -a list auth radius-server ${radiusObjectName} secret`;
+ return module.exports.executeShellCommandOnDevice(constants.LOCAL_HOST, tmshCmd)
+ .then((tmosOutput) => {
+ /**
+ * auth radius-server telemetry_delete_me {
+ * secret
+ * }
+ */
+ encryptedData = tmosOutput.split('\n')[1].trim().split(' ', 2)[1];
+ });
})
.catch((e) => {
error = e;
})
.then(() => {
+ // remove TMOS object at first to keep BIG-IP clean and then throw error if needed
const httpDeleteOptions = {
method: 'DELETE',
- continueOnErrorCode: true
+ continueOnErrorCode: true // ignore error to avoid UnhandledPromiseRejection error
};
module.exports.makeDeviceRequest(constants.LOCAL_HOST, `${uri}/${radiusObjectName}`, httpDeleteOptions);
if (error) {
throw error;
}
- })
- .then(() => {
if (encryptedData.indexOf(',') !== -1) {
throw new Error('Encrypted data should not have a comma in it');
}
dataArray.push(encryptedData);
index += 1;
if (index < splitData.length) {
- return encryptSecretHelper(splitData, dataArray, index);
+ return encryptSecretHelper(splitData, dataArray, index, secretsFromTMSH);
}
return Promise.resolve(dataArray);
});
}
+/**
+ * Check if TMOS version affected by bug when secrets should be fetched from TMSH only (BZ745423)
+ *
+ * @param {Object} version - TMOS version info
+ * @param {String} version.version - TMOS version string
+ *
+ * @returns {Boolean} true if TMOS version affected by bug
+ */
+function isVersionAffectedBySecretsBug(version) {
+ return util.compareVersionStrings(version.version, '>=', '14.1')
+ && util.compareVersionStrings(version.version, '<', '15.0');
+}
+
+
module.exports = {
+ /**
+ * Gather Host Device Info
+ *
+ * @returns {Promise} resolved once info about Host Device was gathered
+ */
+ gatherHostDeviceInfo() {
+ return this.getDeviceType()
+ .then((deviceType) => {
+ this.setHostDeviceInfo(HDC_KEYS.TYPE, deviceType);
+ return this.getDeviceVersion(constants.LOCAL_HOST);
+ })
+ .then((deviceVersion) => {
+ this.setHostDeviceInfo(HDC_KEYS.VERSION, deviceVersion);
+ this.setHostDeviceInfo(HDC_KEYS.RETRIEVE_SECRETS_FROM_TMSH,
+ isVersionAffectedBySecretsBug(deviceVersion));
+ });
+ },
+
+ /**
+ * Clear Host Device Info
+ *
+ * @param {...String} [key] - key(s) to remove, if absent then all keys will be removed
+ */
+ clearHostDeviceInfo() {
+ const keysToRemove = arguments.length ? arguments : Object.keys(HOST_DEVICE_CACHE);
+ Array.prototype.forEach.call(keysToRemove, (toRemove) => {
+ delete HOST_DEVICE_CACHE[toRemove];
+ });
+ },
+
+ /**
+ * Get Host Device info
+ *
+ * @param {String} [key] - key, if omitted then copy of cache will be returned
+ *
+ * @returns {Object|Any} value from cache for the key or copy of cache if no arguments were passed to function
+ */
+ getHostDeviceInfo(key) {
+ if (arguments.length === 0) {
+ return util.deepCopy(HOST_DEVICE_CACHE);
+ }
+ return HOST_DEVICE_CACHE[key];
+ },
+
+ /**
+ * Set Host Device Info
+ * @param {String} key - key
+ * @param {Any} value - value
+ */
+ setHostDeviceInfo(key, value) {
+ HOST_DEVICE_CACHE[key] = value;
+ },
+
/**
* Performs a check of the local environment and returns device type
*
* @returns {Promise} A promise which is resolved with the device type.
- *
*/
getDeviceType() {
- // eslint-disable-next-line no-unused-vars
- return new Promise((resolve, reject) => {
- // eslint-disable-next-line no-unused-vars
- childProcess.exec('/usr/bin/tmsh -a show sys version', (error, stdout, stderr) => {
- if (error) {
- // don't reject, just assume we are running on a container
- resolve(constants.CONTAINER_DEVICE_TYPE);
+ const deviceType = this.getHostDeviceInfo(HDC_KEYS.TYPE);
+ if (typeof deviceType !== 'undefined') {
+ return Promise.resolve(deviceType);
+ }
+
+ return new Promise((resolve) => {
+ const versionFile = '/VERSION';
+ fs.readFile(versionFile, (err, data) => {
+ // .toString() in case if data is Buffer
+ if (!err && (new RegExp('product:\\s+big-ip', 'i')).test(data.toString())) {
+ resolve(constants.DEVICE_TYPE.BIG_IP);
} else {
- // command did not error so we must be a BIG-IP
- resolve(constants.BIG_IP_DEVICE_TYPE);
+ // don't reject, just assume we are running on a container
+ if (err) {
+ logger.debug(`Unable to read '${versionFile}': ${err}`);
+ }
+ resolve(constants.DEVICE_TYPE.CONTAINER);
}
});
});
@@ -600,8 +685,9 @@ module.exports = {
// should have content-range header
if (!crange) {
const msg = `${respObj.statusCode} ${respObj.statusMessage} ${JSON.stringify(respBody)}`;
- throw new Error(`HTTP Error: ${msg}`);
- } else if (respObj.statusCode >= 200 && respObj.statusCode < 300) {
+ return Promise.reject(new Error(`HTTP Error: ${msg}`));
+ }
+ if (respObj.statusCode >= 200 && respObj.statusCode < 300) {
// handle it in async way, waiting for callabck from write
promise = new Promise((resolve, reject) => {
currentBytes += parseInt(respObj.headers['content-length'], 10);
@@ -616,7 +702,7 @@ module.exports = {
} else {
attempt += 1;
if (attempt >= attemptsOnHTTPerror) {
- error = new Error('Exceeded number of attempts on HTTP error');
+ return Promise.reject(new Error('Exceeded number of attempts on HTTP error'));
}
}
@@ -865,8 +951,19 @@ module.exports = {
* @returns {Object} Returns promise resolved with encrypted secret
*/
encryptSecret(data) {
- const splitData = data.match(/(.|\n).{1,500}/g);
- return encryptSecretHelper(splitData, [], 0).then(result => result.join(','));
+ let affectedByBug = this.getHostDeviceInfo(HDC_KEYS.RETRIEVE_SECRETS_FROM_TMSH);
+ let promise = Promise.resolve();
+
+ if (typeof affectedByBug === 'undefined') {
+ promise = promise.then(() => this.getDeviceVersion(constants.LOCAL_HOST))
+ .then((deviceVersion) => {
+ affectedByBug = isVersionAffectedBySecretsBug(deviceVersion);
+ });
+ }
+ return promise.then(() => {
+ const splitData = data.match(/(.|\n){1,500}/g);
+ return encryptSecretHelper(splitData, [], 0, affectedByBug).then(result => result.join(','));
+ });
},
/**
@@ -978,8 +1075,9 @@ module.exports = {
// return (modified) data
return data;
})
- .catch((e) => {
- throw e;
+ .catch((err) => {
+ const msg = `decryptAllSecrets: ${err}`;
+ throw new Error(msg);
});
},
diff --git a/src/lib/endpointLoader.js b/src/lib/endpointLoader.js
index f39a4f88..79253370 100644
--- a/src/lib/endpointLoader.js
+++ b/src/lib/endpointLoader.js
@@ -8,19 +8,61 @@
'use strict';
-const deviceUtil = require('./deviceUtil.js');
-const constants = require('./constants.js');
-const util = require('./util.js');
-const logger = require('./logger.js');
+const deviceUtil = require('./deviceUtil');
+const constants = require('./constants');
+const util = require('./util');
+const logger = require('./logger');
+/** @module EndpointLoader */
+
+/**
+ * Options to use to expand reference
+ *
+ * @typedef {Object} ExpandReferencesOpts
+ * @property {String} [endpointSuffix] - URI suffix to use to modify link
+ * @property {Boolean} [includeStats] - include response from /stats
+ */
+/**
+ * References to expand
+ *
+ * @typedef {Object} newEndpoints - list of endpoints to add
*/
EndpointLoader.prototype.setEndpoints = function (newEndpoints) {
- this.endpoints = {};
- newEndpoints.forEach((endpoint) => {
- // if 'name' presented then use it as unique ID
- // otherwise using 'endpoint' prop
- this.endpoints[endpoint.name || endpoint.endpoint] = endpoint;
- });
+ if (Array.isArray(newEndpoints)) {
+ this.endpoints = {};
+ newEndpoints.forEach((endpoint) => {
+ // if 'name' presented then use it as unique ID
+ // otherwise use path prop
+ this.endpoints[endpoint.name || endpoint.path] = endpoint;
+ });
+ } else {
+ this.endpoints = newEndpoints;
+ }
};
/**
* Authenticate on target device
*
- * @returns {Object} Promise which is resolved when successfully authenticated
+ * @returns {Promise} Promise which is resolved when successfully authenticated
*/
EndpointLoader.prototype.auth = function () {
if (this.options.credentials.token) {
return Promise.resolve();
}
- // in case of optimization, replace with Object.assign
- const options = util.deepCopy(this.options.connection);
+ const options = Object.assign({}, this.options.connection);
return deviceUtil.getAuthToken(
this.host, this.options.credentials.username, this.options.credentials.passphrase, options
)
.then((token) => {
this.options.credentials.token = token.token;
- })
- .catch((err) => {
- throw err;
});
};
-
/**
* Load data from endpoint
*
@@ -82,59 +134,158 @@ EndpointLoader.prototype.auth = function () {
* @param {Object} [options] - function options
* @param {Object} [options.replaceStrings] - key/value pairs that replace matching strings in request body
*
- * @returns {Object} Promise resolved with fetched data
+ * @returns {Promise} Promise resolved with FetchedData
*/
EndpointLoader.prototype.loadEndpoint = function (endpoint, options) {
- const opts = options || {};
- const endpointObj = this.endpoints[endpoint];
-
+ let endpointObj = this.endpoints[endpoint];
if (endpointObj === undefined) {
return Promise.reject(new Error(`Endpoint not defined in file: ${endpoint}`));
}
-
- let dataIsEmpty = false;
- if (this.cachedResponse[endpoint] === undefined) {
- dataIsEmpty = true;
+ // TODO: fix it later, right now it doesn't work with multiple concurrent connections
+ if (!endpointObj.ignoreCached && typeof this.cachedResponse[endpoint] !== 'undefined') {
+ return Promise.resolve(this.cachedResponse[endpoint]);
}
-
- if ((endpointObj || {}).ignoreCached) {
- dataIsEmpty = true;
+ if ((options || {}).replaceStrings) {
+ endpointObj = Object.assign({}, endpointObj);
+ endpointObj.body = this.replaceBodyVars(endpointObj.body, options.replaceStrings);
}
-
- return Promise.resolve()
- .then(() => {
- if (dataIsEmpty) {
- return this._getAndExpandData(endpointObj, { replaceStrings: opts.replaceStrings });
- }
- return Promise.resolve(this.cachedResponse[endpoint]);
- })
+ return this.getAndExpandData(endpointObj)
.then((response) => {
- if (dataIsEmpty) {
- // Cache data for later calls
- this.cachedResponse[endpoint] = response;
- }
+ this.cachedResponse[endpoint] = response;
return Promise.resolve(response);
})
.catch((err) => {
- logger.error(`Error: EndpointLoader.loadEndpoint: ${endpoint}: ${err}`);
+ this.logger.error(`Error: EndpointLoader.loadEndpoint: ${endpoint}: ${err}`);
return Promise.reject(err);
});
};
+
+/**
+ * Expand references
+ *
+ * @param {module:EndpointLoader~Endpoint} endpointObj - endpoint object
+ * @param {module:EndpointLoader~FetchedData} data - fetched data
+ *
+ * @returns {Promise<Array<FetchedData>>} resolved with array of FetchedData
+ */
+EndpointLoader.prototype.expandReferences = function (endpointObj, data) {
+ const promises = [];
+ const dataItems = data.data.items;
+ if (endpointObj.expandReferences && dataItems && Array.isArray(dataItems) && dataItems.length) {
+ // for now let's just support a single reference
+ const referenceKey = Object.keys(endpointObj.expandReferences)[0];
+ const referenceObj = endpointObj.expandReferences[referenceKey];
+ for (let i = 0; i < dataItems.length; i += 1) {
+ const item = dataItems[i][referenceKey];
+ if (item && item.link) {
+ let referenceEndpoint = this.getURIPath(item.link);
+ if (referenceObj.endpointSuffix) {
+ referenceEndpoint = `${referenceEndpoint}${referenceObj.endpointSuffix}`;
+ }
+ if (referenceObj.includeStats) {
+ promises.push(this.getData(`${referenceEndpoint}/stats`, { name: i, refKey: referenceKey }));
+ }
+ promises.push(this.getData(referenceEndpoint, { name: i, refKey: referenceKey }));
+ }
+ }
+ }
+ return Promise.all(promises);
+};
+/**
+ * Fetch stats for each item
+ *
+ * @param {module:EndpointLoader~Endpoint} endpointObj - endpoint object
+ * @param {Object} data - data
+ * @param {String} data.name - name
+ * @param {Object} data.data - data to process
+ *
+ * @returns {Promise<Array<FetchedData>>} resolved with array of FetchedData
+ */
+EndpointLoader.prototype.fetchStats = function (endpointObj, data) {
+ const promises = [];
+ const dataItems = data.data.items;
+ if (endpointObj.includeStats && dataItems && Array.isArray(dataItems) && dataItems.length) {
+ for (let i = 0; i < dataItems.length; i += 1) {
+ const item = dataItems[i];
+ // check for selfLink property
+ if (item.selfLink) {
+ promises.push(this.getData(`${this.getURIPath(item.selfLink)}/stats`, { name: i }));
+ }
+ }
+ }
+ return Promise.all(promises);
+};
+/**
+ * Substitute data
+ *
+ * @param {module:EndpointLoader~FetchedData} baseData - base data
+ * @param {Array} dataArray - array of data to use for substitution
+ * @param {Boolean} shallowCopy - true if shallow copy required else
+ * original object will be used
+ */
+EndpointLoader.prototype.substituteData = function (baseData, dataArray, shallowCopy) {
+ if (!dataArray.length) {
+ return;
+ }
+ const baseDataItems = baseData.data.items;
+ dataArray.forEach((data) => {
+ try {
+ let dataToSubstitute;
+ if (shallowCopy === true) {
+ dataToSubstitute = Object.assign(data.data, baseDataItems[data.name]);
+ } else {
+ dataToSubstitute = data.data;
+ }
+ if (data.refKey) {
+ // if this is the first time substituting data, overwrite the containing object with data
+ // e.g.
+ // itemsRef: {
+ // link: 'http://objLink/objItems',
+ // isSubcollection: true
+ // }
+ // will become:
+ // itemsRef: {
+ // objItemProp1: 123 //data from link
+ // }
+ if (baseDataItems[data.name][data.refKey].link) {
+ baseDataItems[data.name][data.refKey] = dataToSubstitute;
+ } else {
+ // otherwise if same object has been previously substituted
+ // and we're merging new set of props from a different link (e.g. objItems/stats)
+ // then copy over the properties of the new dataToSubstitute
+ // e.g.
+ // itemsRef: {
+ // objItemProp1: 123
+ // objItemProp2: true
+ // }
+ Object.assign(baseDataItems[data.name][data.refKey], dataToSubstitute);
+ }
+ } else {
+ baseDataItems[data.name] = dataToSubstitute;
+ }
+ } catch (e) {
+ // just continue
+ }
+ });
+};
/**
* Get data for specific endpoint
*
* @param {String} uri - uri where data resides
- * @param {Object} options - function options
+ * @param {Object} [options] - function options
* @param {String} [options.name] - name of key to store as, will override default of uri
* @param {String} [options.body] - body to send, sent via POST request
+ * @param {String} [options.refKey] - reference key
* @param {String[]} [options.endpointFields] - restrict collection to these fields
*
- * @returns {Object} Promise which is resolved with data
+ * @returns {Promise} resolved with FetchedData
*/
-EndpointLoader.prototype._getData = function (uri, options) {
- logger.debug(`EndpointLoader._getData: loading data from URI = ${uri}`);
- // remove parse-stringify in case of optimizations
- const httpOptions = Object.assign({}, util.deepCopy(this.options.connection));
+EndpointLoader.prototype.getData = function (uri, options) {
+ this.logger.debug(`EndpointLoader.getData: loading data from URI = ${uri}`);
+
+ options = options || {};
+ const httpOptions = Object.assign({}, this.options.connection);
+
httpOptions.credentials = {
username: this.options.credentials.username,
token: this.options.credentials.token
@@ -147,165 +298,86 @@ EndpointLoader.prototype._getData = function (uri, options) {
maxTries: 3,
backoff: 100
};
-
- let fullUri = uri;
- if (options.endpointFields) {
- fullUri = `${fullUri}?$select=${options.endpointFields.join(',')}`;
- }
-
+ const fullUri = options.endpointFields ? `${uri}?$select=${options.endpointFields.join(',')}` : uri;
return util.retryPromise(() => deviceUtil.makeDeviceRequest(this.host, fullUri, httpOptions), retryOpts)
.then((data) => {
- // use uri unless name is explicitly provided
- const nameToUse = options.name !== undefined ? options.name : uri;
- const ret = { name: nameToUse, data };
+ const ret = {
+ name: options.name !== undefined ? options.name : uri,
+ data
+ };
+ if (options.refKey) {
+ ret.refKey = options.refKey;
+ }
return ret;
- })
- .catch((err) => {
- throw err;
});
};
/**
* Get data for specific endpoint (with some extra logic)
*
- * @param {Object} endpointProperties - endpoint properties
- * @param {Object} [options] - function options
- * @param {Object} [options.replaceStrings] - key/value pairs that replace matching strings in request body
- *
- * @returns {Object} Promise which is resolved with data
+ * @param {module:EndpointLoader~Endpoint} endpointObj - endpoint object
+ * @param {String} endpointObj.path - URI path to get data from
+ * @returns {Promise} resolved with FetchedData
*/
-EndpointLoader.prototype._getAndExpandData = function (endpointProperties, options) {
- const opts = options || {};
- const p = endpointProperties;
- let completeData;
- let referenceKey;
- const childItemKey = 'items'; // assume we are looking inside of 'items'
-
- // remote protocol, host and query params
- const fixEndpoint = i => i.replace('https://localhost', '').split('?')[0];
-
- const substituteData = (data, childKey, assign) => {
- // this tells us we need to modify the data
- if (completeData) {
- data.forEach((i) => {
- try {
- let dataToSubstitute;
- if (assign === true) {
- dataToSubstitute = Object.assign(i.data, completeData.data[childItemKey][i.name]);
- } else {
- dataToSubstitute = i.data;
- }
-
- if (childKey) {
- // if this is the first time substituting data, overwrite the containing object with data
- // e.g.
- // itemsRef: {
- // link: 'http://objLink/objItems',
- // isSubcollection: true
- // }
- // will become:
- // itemsRef: {
- // objItemProp1: 123 //data from link
- // }
- if (completeData.data[childItemKey][i.name][childKey].link) {
- completeData.data[childItemKey][i.name][childKey] = dataToSubstitute;
- } else {
- // otherwise if same object has been previously substituted
- // and we're merging new set of props from a different link (e.g. objItems/stats)
- // then copy over the properties of the new dataToSubstitute
- // e.g.
- // itemsRef: {
- // objItemProp1: 123
- // objItemProp2: true
- // }
- Object.assign(completeData.data[childItemKey][i.name][childKey], dataToSubstitute);
- }
- } else {
- completeData.data[childItemKey][i.name] = dataToSubstitute;
- }
- } catch (e) {
- // just continue
- }
- });
- return Promise.resolve(completeData); // return substituted data
- }
- return Promise.resolve(data); // return data
- };
-
- const replaceBodyVars = (body, replaceStrings) => {
- let bodyStr = JSON.stringify(body);
-
- Object.keys(replaceStrings).forEach((key) => {
- bodyStr = bodyStr.replace(new RegExp(key), replaceStrings[key]);
+EndpointLoader.prototype.getAndExpandData = function (endpointObj) {
+ // baseData in this method is the data fetched from endpointObj.path
+ return this.getData(endpointObj.path, endpointObj)
+ // Promise below will be resolved with array of 2 elements:
+ // [ baseData, [refData, refData] ]
+ .then(baseData => Promise.all([
+ Promise.resolve(baseData),
+ this.expandReferences(endpointObj, baseData)
+ ]))
+ .then((dataArray) => {
+ // dataArray === [ baseData, [refData, refData] ]
+ const baseData = dataArray[0];
+ this.substituteData(baseData, dataArray[1], false);
+ return Promise.all([
+ Promise.resolve(baseData),
+ this.fetchStats(endpointObj, baseData)
+ ]);
+ })
+ // Promise below will be resolved with array of 2 elements:
+ // [ baseData, [statsData, statsData] ]
+ .then((dataArray) => {
+ // dataArray === [ baseData, [statsData, statsData] ]
+ const baseData = dataArray[0];
+ this.substituteData(baseData, dataArray[1], true);
+ return baseData;
});
+};
- return JSON.parse(bodyStr);
- };
-
- const body = opts.replaceStrings ? replaceBodyVars(p.body, opts.replaceStrings) : p.body;
-
- return this._getData(
- p.endpoint,
- { name: p.name, body, endpointFields: p.endpointFields }
- )
- .then((data) => {
- // data: { name: foo, data: bar }
- // check if expandReferences property was specified
- if (p.expandReferences) {
- completeData = data;
- const actualData = data.data;
- // set default value if not exists
- actualData[childItemKey] = actualData[childItemKey] === undefined ? [] : actualData[childItemKey];
- // for now let's just support a single reference
- referenceKey = Object.keys(p.expandReferences)[0];
- const referenceObj = p.expandReferences[Object.keys(p.expandReferences)[0]];
-
- const promises = [];
- if (typeof actualData === 'object' && Array.isArray(actualData[childItemKey])) {
- for (let i = 0; i < actualData[childItemKey].length; i += 1) {
- const item = actualData[childItemKey][i];
- // first check for reference and then link property
- if (item[referenceKey] && item[referenceKey].link) {
- let referenceEndpoint = fixEndpoint(item[referenceKey].link);
- if (referenceObj.endpointSuffix) {
- referenceEndpoint = `${referenceEndpoint}${referenceObj.endpointSuffix}`;
- }
- if (referenceObj.includeStats) {
- promises.push(this._getData(`${referenceEndpoint}/stats`, { name: i }));
- }
- promises.push(this._getData(referenceEndpoint, { name: i }));
- }
- }
- }
- return Promise.all(promises);
- }
- return Promise.resolve(data); // just return the data
- })
- .then(data => substituteData(data, referenceKey, false))
- .then((data) => {
- completeData = null;
- // check if includeStats property was specified
- if (p.includeStats) {
- completeData = data;
- const actualData = data.data;
+/**
+ * Replace variables in body with values
+ *
+ * @param {Object|String} body - request body
+ * @param {Object} keys - keys/vars to replace
+ *
+ * @returns {Object|String}
+ */
+EndpointLoader.prototype.replaceBodyVars = function (body, keys) {
+ let isObject = false;
+ if (typeof body !== 'string') {
+ isObject = true;
+ body = JSON.stringify(body);
+ }
+ Object.keys(keys).forEach((key) => {
+ body = body.replace(new RegExp(key), keys[key]);
+ });
+ if (isObject) {
+ body = JSON.parse(body);
+ }
+ return body;
+};
- const promises = [];
- if (typeof actualData === 'object' && Array.isArray(actualData[childItemKey])) {
- for (let i = 0; i < actualData[childItemKey].length; i += 1) {
- const item = actualData[childItemKey][i];
- // check for selfLink property
- if (item.selfLink) {
- promises.push(this._getData(`${fixEndpoint(item.selfLink)}/stats`, { name: i }));
- }
- }
- }
- return Promise.all(promises);
- }
- return Promise.resolve(data); // just return the data
- })
- .then(data => substituteData(data, null, true))
- .catch((err) => {
- throw err;
- });
+/**
+ * Get URI path
+ *
+ * @param {String} uri - URI
+ *
+ * @returns {String} URI path
+ */
+EndpointLoader.prototype.getURIPath = function (uri) {
+ return uri.replace('https://localhost', '').split('?')[0];
};
module.exports = EndpointLoader;
diff --git a/src/lib/eventListener.js b/src/lib/eventListener.js
index 9e913c86..2cf65e20 100644
--- a/src/lib/eventListener.js
+++ b/src/lib/eventListener.js
@@ -11,23 +11,23 @@
const net = require('net');
const dgram = require('dgram');
-const logger = require('./logger.js');
-const constants = require('./constants.js');
-const normalize = require('./normalize.js');
-const dataPipeline = require('./dataPipeline.js');
-const configWorker = require('./config.js');
+const logger = require('./logger');
+const constants = require('./constants');
+const normalize = require('./normalize');
+const dataPipeline = require('./dataPipeline');
+const configWorker = require('./config');
const properties = require('./properties.json');
-const tracers = require('./util.js').tracer;
-const stringify = require('./util.js').stringify;
-const isObjectEmpty = require('./util.js').isObjectEmpty;
+const tracers = require('./util').tracer;
+const stringify = require('./util').stringify;
+const isObjectEmpty = require('./util').isObjectEmpty;
const global = properties.global;
const events = properties.events;
const definitions = properties.definitions;
const DEFAULT_PORT = constants.DEFAULT_EVENT_LISTENER_PORT;
-const CLASS_NAME = constants.EVENT_LISTENER_CLASS_NAME;
+const CLASS_NAME = constants.CONFIG_CLASSES.EVENT_LISTENER_CLASS_NAME;
const MAX_BUFFER_SIZE = 16 * 1024; // 16k chars
const MAX_BUFFER_TIMEOUTS = 5;
@@ -454,6 +454,6 @@ configWorker.on('change', (config) => {
});
logger.debug(`${Object.keys(listeners).length} event listener(s) listening`);
- tracers.remove(null, tracer => tracer.name.startsWith(CLASS_NAME)
- && tracer.lastGetTouch < tracersTimestamp);
+ tracers.remove(tracer => tracer.name.startsWith(CLASS_NAME)
+ && tracer.lastGetTouch < tracersTimestamp);
});
diff --git a/src/lib/forwarder.js b/src/lib/forwarder.js
index 95d671da..e7b9259a 100644
--- a/src/lib/forwarder.js
+++ b/src/lib/forwarder.js
@@ -8,8 +8,8 @@
'use strict';
-const logger = require('./logger.js'); // eslint-disable-line no-unused-vars
-const consumersHndlr = require('./consumers.js');
+const logger = require('./logger'); // eslint-disable-line no-unused-vars
+const consumersHndlr = require('./consumers');
/**
* Forward data to consumer
diff --git a/src/lib/ihealth.js b/src/lib/ihealth.js
index c04491d8..203261b3 100644
--- a/src/lib/ihealth.js
+++ b/src/lib/ihealth.js
@@ -8,17 +8,17 @@
'use strict';
-const logger = require('./logger.js'); // eslint-disable-line no-unused-vars
-const constants = require('./constants.js');
-const util = require('./util.js');
-const configWorker = require('./config.js');
-const iHealthPoller = require('./ihealthPoller.js');
-const dataPipeline = require('./dataPipeline.js');
-const normalize = require('./normalize.js');
+const logger = require('./logger'); // eslint-disable-line no-unused-vars
+const constants = require('./constants');
+const util = require('./util');
+const configWorker = require('./config');
+const iHealthPoller = require('./ihealthPoller');
+const dataPipeline = require('./dataPipeline');
+const normalize = require('./normalize');
const properties = require('./properties.json').ihealth;
-const SYSTEM_CLASS_NAME = constants.SYSTEM_CLASS_NAME;
-const IHEALTH_POLLER_CLASS_NAME = constants.IHEALTH_POLLER_CLASS_NAME;
+const SYSTEM_CLASS_NAME = constants.CONFIG_CLASSES.SYSTEM_CLASS_NAME;
+const IHEALTH_POLLER_CLASS_NAME = constants.CONFIG_CLASSES.IHEALTH_POLLER_CLASS_NAME;
/** @module ihealth */
@@ -241,8 +241,8 @@ configWorker.on('change', (config) => {
}
});
- util.tracer.remove(null, tracer => tracer.name.startsWith(IHEALTH_POLLER_CLASS_NAME)
- && tracer.lastGetTouch < tracersTimestamp);
+ util.tracer.remove(tracer => tracer.name.startsWith(IHEALTH_POLLER_CLASS_NAME)
+ && tracer.lastGetTouch < tracersTimestamp);
logger.debug(`${Object.keys(pollers).length} iHealth poller(s) running`);
});
diff --git a/src/lib/ihealthPoller.js b/src/lib/ihealthPoller.js
index 2e334f4f..e4745d78 100644
--- a/src/lib/ihealthPoller.js
+++ b/src/lib/ihealthPoller.js
@@ -8,17 +8,17 @@
'use strict';
-const logger = require('./logger.js'); // eslint-disable-line no-unused-vars
-const constants = require('./constants.js');
-const datetimeUtil = require('./datetimeUtil.js');
-const util = require('./util.js');
-const deviceUtil = require('./deviceUtil.js');
-const ihUtil = require('./ihealthUtil.js');
-const persistentStorage = require('./persistentStorage.js').persistentStorage;
-const configWorker = require('./config.js');
-
-const SYSTEM_CLASS_NAME = constants.SYSTEM_CLASS_NAME;
-const IHEALTH_POLLER_CLASS_NAME = constants.IHEALTH_POLLER_CLASS_NAME;
+const logger = require('./logger'); // eslint-disable-line no-unused-vars
+const constants = require('./constants');
+const datetimeUtil = require('./datetimeUtil');
+const util = require('./util');
+const deviceUtil = require('./deviceUtil');
+const ihUtil = require('./ihealthUtil');
+const persistentStorage = require('./persistentStorage').persistentStorage;
+const configWorker = require('./config');
+
+const SYSTEM_CLASS_NAME = constants.CONFIG_CLASSES.SYSTEM_CLASS_NAME;
+const IHEALTH_POLLER_CLASS_NAME = constants.CONFIG_CLASSES.IHEALTH_POLLER_CLASS_NAME;
const PERSISTENT_STORAGE_KEY = 'ihealth';
const IHEALTH_POLL_MAX_TIMEOUT = 60 * 60 * 1000; // 1 h.
diff --git a/src/lib/ihealthUtil.js b/src/lib/ihealthUtil.js
index 43639ff3..1a7a6d11 100644
--- a/src/lib/ihealthUtil.js
+++ b/src/lib/ihealthUtil.js
@@ -13,10 +13,10 @@ const fs = require('fs');
const path = require('path');
const request = require('request');
-const logger = require('./logger.js');
-const constants = require('./constants.js');
-const util = require('./util.js');
-const deviceUtil = require('./deviceUtil.js');
+const logger = require('./logger');
+const constants = require('./constants');
+const util = require('./util');
+const deviceUtil = require('./deviceUtil');
/** @module ihealthUtil */
@@ -816,7 +816,7 @@ QkviewManager.prototype.prepare = function () {
.then(() => deviceUtil.getDeviceType())
.then((deviceType) => {
this.deviceType = deviceType;
- if (this.deviceType === constants.BIG_IP_DEVICE_TYPE) {
+ if (this.deviceType === constants.DEVICE_TYPE.BIG_IP) {
return this.checkIsItLocalDevice();
}
return Promise.resolve();
diff --git a/src/lib/logger.js b/src/lib/logger.js
index e9c8bfaa..e044a96e 100644
--- a/src/lib/logger.js
+++ b/src/lib/logger.js
@@ -8,6 +8,8 @@
'use strict';
+/** @module logger */
+
let logger;
try {
// eslint-disable-next-line global-require, import/no-unresolved
diff --git a/src/lib/normalize.js b/src/lib/normalize.js
index b86e9539..1f8e4cba 100644
--- a/src/lib/normalize.js
+++ b/src/lib/normalize.js
@@ -8,9 +8,9 @@
'use strict';
-const logger = require('./logger.js'); // eslint-disable-line no-unused-vars
-const constants = require('./constants.js');
-const normalizeUtil = require('./normalizeUtil.js');
+const logger = require('./logger'); // eslint-disable-line no-unused-vars
+const constants = require('./constants');
+const normalizeUtil = require('./normalizeUtil');
/**
@@ -164,8 +164,10 @@ module.exports = {
try {
return normalizeUtil[options.func](args);
} catch (e) {
- logger.exception(`runCustomFunction failed: ${e}`, e);
- throw new Error(`runCustomFunction failed: ${e}`);
+ const errMsg = `runCustomFunction '${options.func}' failed: ${e}`;
+ logger.exception(errMsg, e);
+ e.message = errMsg;
+ throw e;
}
},
@@ -208,7 +210,9 @@ module.exports = {
const keysToReduce = ['nestedStats', 'value', 'description', 'color'];
for (let i = 0; i < keysToReduce.length; i += 1) {
const item = data[keysToReduce[i]];
- if (item !== undefined && Object.keys(data).length === 1) return this._reduceData(item, options);
+ if (item !== undefined && Object.keys(data).length === 1) {
+ return this._reduceData(item, options);
+ }
}
// .entries evaluates to true if data is an array
@@ -285,10 +289,19 @@ module.exports = {
* @param {Object} options - options
* @param {Array} [options.skip] - array of child object keys to skip
* @param {Array} [options.classifyByKeys] - classify by specific keys (used by events)
+ * @param {Array} [options.tags] - tags to apply in addition to "tags"
*
* @returns {Object} Returns data with added tags
*/
_addKeysByTag(data, tags, definitions, options) {
+ tags = Object.assign({}, tags);
+ if (options && options.tags) {
+ Object.keys(options.tags).forEach((key) => {
+ if (typeof tags[key] === 'undefined') {
+ tags[key] = options.tags[key];
+ }
+ });
+ }
const tagKeys = Object.keys(tags);
const skip = options.skip || [];
const def = definitions || {};
@@ -301,15 +314,20 @@ module.exports = {
// then check if the tag value contains 'pattern'
// otherwise assume the tag value is a 'constant'
let tagValue = tags[t];
- if (tagValue in def) tagValue = def[tagValue]; // overwrite with def value
-
+ if (tagValue in def) {
+ tagValue = def[tagValue]; // overwrite with def value
+ }
if (tagValue.pattern) {
const match = normalizeUtil._checkForMatch(key, tagValue.pattern, tagValue.group);
- if (match) val = match;
+ if (match) {
+ val = match;
+ }
} else {
val = tagValue;
}
- thisData[t] = val;
+ if (val) {
+ thisData[t] = val;
+ }
});
return thisData;
};
@@ -549,11 +567,11 @@ module.exports = {
includeFirstEntry: (options.normalization.find(n => n.includeFirstEntry)
|| {}).includeFirstEntry
};
- ret = this._reduceData(data, reduceDataOptions);
+ ret = this._reduceData(norm.useCurrentData ? ret : data, reduceDataOptions);
setReduced = true;
// get data by key
- ret = options.key ? this._getDataByKey(ret, options.key) : ret;
+ ret = options.key && !norm.keepKey ? this._getDataByKey(ret, options.key) : ret;
}
if (norm.filterKeys) {
diff --git a/src/lib/normalizeConfig.js b/src/lib/normalizeConfig.js
new file mode 100644
index 00000000..686e6109
--- /dev/null
+++ b/src/lib/normalizeConfig.js
@@ -0,0 +1,285 @@
+/*
+ * Copyright 2020. F5 Networks, Inc. See End User License Agreement ("EULA") for
+ * license terms. Notwithstanding anything to the contrary in the EULA, Licensee
+ * may copy and modify this software product for its internal business purposes.
+ * Further, Licensee may upload, publish and distribute the modified version of
+ * the software product on devcentral.f5.com.
+ */
+
+'use strict';
+
+const constants = require('./constants');
+const logger = require('./logger');
+const util = require('./util');
+
+const CONFIG_CLASSES = constants.CONFIG_CLASSES;
+
+/** @module normalizeConfig */
+
+function getTelemetryObjects(originalConfig, className) {
+ return originalConfig[className] || {};
+}
+
+function getTelemetrySystems(originalConfig) {
+ return getTelemetryObjects(originalConfig, CONFIG_CLASSES.SYSTEM_CLASS_NAME);
+}
+
+function getTelemetrySystemPollers(originalConfig) {
+ return getTelemetryObjects(originalConfig, CONFIG_CLASSES.SYSTEM_POLLER_CLASS_NAME);
+}
+
+function getTelemetryEndpoints(originalConfig) {
+ return getTelemetryObjects(originalConfig, CONFIG_CLASSES.ENDPOINTS_CLASS_NAME);
+}
+
+/**
+ * Force allowSelfSignedCert to default value if not specified
+ *
+ * @param {Object} originalConfig - origin config
+ */
+function verifyAllowSelfSignedCert(originalConfig) {
+ const telemetrySystems = getTelemetrySystems(originalConfig);
+ Object.keys(telemetrySystems).forEach((systemName) => {
+ const system = telemetrySystems[systemName];
+ if (typeof system.allowSelfSignedCert === 'undefined') {
+ system.allowSelfSignedCert = !constants.STRICT_TLS_REQUIRED;
+ }
+ });
+}
+
+/**
+ * Expand endpoints references in Telemetry_System objects
+ *
+ * @param {Object} originalConfig - origin config
+ */
+function normalizeTelemetryEndpoints(originalConfig) {
+ const telemetryEndpoints = getTelemetryEndpoints(originalConfig);
+ const telemetrySystems = getTelemetrySystems(originalConfig);
+
+ function computeBasePath(endpoint) {
+ let basePath = '';
+ if (endpoint.basePath && endpoint.basePath.length > 0) {
+ const pathPrefix = endpoint.basePath.startsWith('/') ? '' : '/';
+ if (endpoint.basePath.endsWith('/')) {
+ basePath = endpoint.basePath.substring(0, endpoint.basePath.length - 1);
+ } else {
+ basePath = endpoint.basePath;
+ }
+ basePath = `${pathPrefix}${basePath}`;
+ }
+ endpoint.basePath = basePath;
+ }
+
+ function fixEndpointPath(endpoint) {
+ endpoint.path = endpoint.path.startsWith('/') ? endpoint.path : `/${endpoint.path}`;
+ }
+
+ function parseEndpointItem(endpoint, key) {
+ const innerEndpoint = util.deepCopy(endpoint.items[key]);
+ fixEndpointPath(innerEndpoint);
+ innerEndpoint.enable = endpoint.enable && innerEndpoint.enable;
+ innerEndpoint.path = `${endpoint.basePath}${innerEndpoint.path}`;
+ innerEndpoint.name = innerEndpoint.name || key;
+ return innerEndpoint;
+ }
+
+ function processEndpoint(endpoint, cb) {
+ if (typeof endpoint === 'object') {
+ // array of definitions - can be all of the following
+ if (Array.isArray(endpoint)) {
+ endpoint.forEach(innerEndpoint => processEndpoint(innerEndpoint, cb));
+ // endpoint is Telemetry_Endpoints
+ } else if (endpoint.class === CONFIG_CLASSES.ENDPOINTS_CLASS_NAME || endpoint.items) {
+ // don't need to copy 'endpoint' because it was either reference or inline config
+ computeBasePath(endpoint);
+ Object.keys(endpoint.items).forEach(key => cb(parseEndpointItem(endpoint, key)));
+ // endpoint is Telemetry_Endpoint
+ } else if (typeof endpoint.path === 'string') {
+ fixEndpointPath(endpoint);
+ cb(endpoint);
+ }
+ } else if (typeof endpoint === 'string') {
+ const refs = endpoint.split('/');
+ // reference to a Telemetry_Endpoints object
+ // format is ObjectName/pathName
+ endpoint = telemetryEndpoints[refs[0]];
+ if (refs.length > 1) {
+ // reference to a child of Telemetry_Endpoints.items
+ const item = endpoint.items[refs[1]];
+ endpoint = {
+ items: { [refs[1]]: item },
+ basePath: endpoint.basePath,
+ enable: endpoint.enable
+ };
+ }
+ processEndpoint(endpoint, cb);
+ }
+ }
+
+ Object.keys(telemetrySystems).forEach((systemName) => {
+ const system = telemetrySystems[systemName];
+ system.systemPollers.forEach((poller) => {
+ if (Object.prototype.hasOwnProperty.call(poller, 'endpointList')) {
+ const endpointList = {};
+ processEndpoint(poller.endpointList, (endpoint) => {
+ if (endpoint.enable) {
+ endpointList[endpoint.name] = endpoint;
+ } else {
+ logger.debug(`${systemName}: ignoring disabled endpoint '${endpoint.name}' ('${endpoint.path}')`);
+ }
+ });
+ poller.endpointList = endpointList;
+ }
+ });
+ });
+}
+
+/**
+ * Expand references in Telemetry_System objects
+ * Note: as result each System and its System Pollers will have 'name' property with actual name
+ *
+ * @param {Object} originalConfig - origin config
+ */
+function normalizeTelemetrySystems(originalConfig) {
+ const sysPollersToDelete = {};
+ const telemetrySystems = getTelemetrySystems(originalConfig);
+ const telemetrySystemPollers = getTelemetrySystemPollers(originalConfig);
+ const keysToCopy = [
+ 'actions', 'enable', 'endpointList',
+ 'interval', 'tag', 'trace'
+ ];
+
+ const copySystemPoller = (systemPoller) => {
+ const newSystemPoller = {};
+ systemPoller = util.deepCopy(systemPoller);
+ keysToCopy.forEach((key) => {
+ if (Object.prototype.hasOwnProperty.call(systemPoller, key)) {
+ newSystemPoller[key] = systemPoller[key];
+ }
+ });
+ return newSystemPoller;
+ };
+
+ const createSystemPollerName = id => `SystemPoller_${id}`;
+
+ Object.keys(telemetrySystems).forEach((systemName) => {
+ const system = telemetrySystems[systemName];
+ system.name = systemName;
+
+ system.systemPollers = system.systemPoller;
+ delete system.systemPoller;
+
+ if (!Array.isArray(system.systemPollers)) {
+ system.systemPollers = system.systemPollers ? [system.systemPollers] : [];
+ }
+ // existing Telemetry_System_Poller names
+ const existingNames = [];
+ system.systemPollers.forEach((systemPoller, index, pollers) => {
+ // systemPoller can be either string or object
+ if (typeof systemPoller === 'string') {
+ // expand reference and replace it with existing configuration
+ sysPollersToDelete[systemPoller] = true;
+ pollers[index] = copySystemPoller(telemetrySystemPollers[systemPoller]);
+ pollers[index].name = systemPoller;
+ existingNames.push(systemPoller);
+ }
+ });
+ // time to assign name to pollers without name
+ let nameID = 0;
+ system.systemPollers.forEach((systemPoller) => {
+ if (typeof systemPoller.name === 'undefined') {
+ do {
+ nameID += 1;
+ systemPoller.name = createSystemPollerName(nameID);
+ } while (existingNames.indexOf(systemPoller.name) !== -1);
+ }
+ });
+ });
+ // remove System Pollers that were used as references
+ Object.keys(sysPollersToDelete).forEach((key) => {
+ delete telemetrySystemPollers[key];
+ });
+}
+
+/**
+ * Convert Telemetry_System_Poller to Telemetry_System
+ * Note: as result each System and its System Pollers will have 'name' property with actual name
+ *
+ * @param {Object} originalConfig - origin config
+ */
+function normalizeTelemetrySystemPollers(originalConfig) {
+ const telemetrySystems = getTelemetrySystems(originalConfig);
+ const telemetrySystemPollers = getTelemetrySystemPollers(originalConfig);
+ const keysToCopy = [
+ 'allowSelfSignedCert', 'enable', 'enableHostConnectivityCheck', 'host',
+ 'port', 'protocol', 'passphrase', 'trace', 'username'
+ ];
+ const skipDelete = ['enable', 'trace'];
+ delete originalConfig[CONFIG_CLASSES.SYSTEM_POLLER_CLASS_NAME];
+
+ function createSystemFromSystemPoller(systemPollerName, systemPoller) {
+ /**
+ * if Telemetry_System_Poller is not referenced by any of Telemetry_System
+ * then it should be converted to Telemetry_System.
+ * Don't need to make copy of origin object.
+ */
+ delete systemPoller.class;
+ const newSystem = {
+ class: CONFIG_CLASSES.SYSTEM_CLASS_NAME
+ };
+ keysToCopy.forEach((key) => {
+ if (Object.prototype.hasOwnProperty.call(systemPoller, key)) {
+ newSystem[key] = systemPoller[key];
+ if (skipDelete.indexOf(key) === -1) {
+ delete systemPoller[key];
+ }
+ }
+ });
+
+ systemPoller.name = systemPollerName;
+ newSystem.name = systemPollerName;
+ newSystem.systemPollers = [systemPoller];
+ return newSystem;
+ }
+
+ Object.keys(telemetrySystemPollers).forEach((systemPollerName) => {
+ logger.debug(`Converting ${CONFIG_CLASSES.SYSTEM_POLLER_CLASS_NAME} '${systemPollerName}' to ${CONFIG_CLASSES.SYSTEM_CLASS_NAME}`);
+ telemetrySystems[systemPollerName] = createSystemFromSystemPoller(
+ systemPollerName, telemetrySystemPollers[systemPollerName]
+ );
+ });
+ if (Object.keys(telemetrySystems).length) {
+ originalConfig[CONFIG_CLASSES.SYSTEM_CLASS_NAME] = telemetrySystems;
+ }
+}
+
+/**
+ * Normalize configuration and expand all references.
+ *
+ * @param {object} originalConfig - original config, should be copied by caller
+ *
+ * @returns {Object} normalized configuration with expanded references
+ */
+module.exports = function (originalConfig) {
+ /**
+ * Assume that originalConfig is valid declaration.
+ * originalConfig should look like following:
+ * {
+ * 'classNameA': {
+ * 'objectA': {},
+ * 'objectB': {}
+ * },
+ * ...
+ * 'classNameZ': {
+ * 'objectY': {},
+ * 'objectZ': {}
+ * }
+ * }
+ */
+ // TODO: add normalization for Telemetry_iHealth_Poller and Telemetry_Listener classes
+ normalizeTelemetrySystems(originalConfig);
+ normalizeTelemetrySystemPollers(originalConfig);
+ verifyAllowSelfSignedCert(originalConfig);
+ normalizeTelemetryEndpoints(originalConfig);
+ return originalConfig;
+};
diff --git a/src/lib/normalizeUtil.js b/src/lib/normalizeUtil.js
index c46284ee..40a7f1f6 100644
--- a/src/lib/normalizeUtil.js
+++ b/src/lib/normalizeUtil.js
@@ -8,11 +8,32 @@
'use strict';
-const logger = require('./logger.js'); // eslint-disable-line no-unused-vars
-const util = require('./util.js');
+const logger = require('./logger'); // eslint-disable-line no-unused-vars
+const util = require('./util');
+const constants = require('./constants');
module.exports = {
+ /**
+ * Format MAC address
+ *
+ * @param {String} mac - MAC address
+ *
+ * @returns {String} formatted MAC address
+ */
+ _formatMACAddress(mac) {
+ // expect ':' in mac addr - aa:b:cc:d:ee:f
+ if (mac.indexOf(':') === -1) {
+ return mac;
+ }
+ return mac.split(':').map((item) => {
+ item = item.toUpperCase();
+ if (item.length === 1) {
+ item = `0${item}`;
+ }
+ return item;
+ }).join(':');
+ },
/**
* Convert array to map using provided options
@@ -186,6 +207,48 @@ module.exports = {
return data;
},
+ /**
+ * Restructure host-info to collect CPU statistics for the host(s) and cpu(s)
+ * that match the provided key pattern.
+ * This function depends upon the exact output from the host-info endpoint, and requires
+ * that every object key is unique.
+ *
+ * @param {Object} args - args object
+ * @param {Object} [args.data] - data to process (always included)
+ * @param {String} [args.keyPattern] - pattern used to traverse object keys
+ *
+ * @returns {Object} Returns matching sub-properties
+ */
+ restructureHostCpuInfo(args) {
+ if (!args.keyPattern) {
+ throw new Error('Argument keyPattern required');
+ }
+ const data = args.data;
+ if (typeof data !== 'object') {
+ return data;
+ }
+ const keys = args.keyPattern.split(constants.STATS_KEY_SEP);
+
+ const findMatches = (inputData) => {
+ if (keys.length === 0) {
+ return inputData;
+ }
+ const keyExp = new RegExp(keys.splice(0, 1));
+ const matchedData = {};
+
+ Object.keys(inputData).forEach((dataItem) => {
+ if (keyExp.test(dataItem)) {
+ // Capture ALL sub-properties if property matches, instead of iterating over object keys
+ // Will overwrite matching keys in 'matchedData' - assumption is that *EVERY* key is unique
+ Object.assign(matchedData, inputData[dataItem]);
+ }
+ });
+ return findMatches(matchedData);
+ };
+ const result = findMatches(data);
+ return Object.keys(result).length === 0 ? 'missing data' : result;
+ },
+
/**
* Average values
*
@@ -196,15 +259,22 @@ module.exports = {
* @returns {Object} Returns averaged value
*/
getAverage(args) {
- if (!args.keyWithValue) { throw new Error('Argument keyWithValue required'); }
+ if (!args.keyWithValue) {
+ throw new Error('Argument keyWithValue required');
+ }
const data = args.data;
+ if (typeof data !== 'object') {
+ return data;
+ }
const values = [];
// for now assume in object, could also be provided an array and just average that
Object.keys(data).forEach((k) => {
const key = args.keyWithValue;
// throw error if key is missing
- if (!(key in data[k])) { throw new Error(`Expecting key: ${key} in object: ${util.stringify(data[k])}`); }
+ if (!(key in data[k])) {
+ throw new Error(`Expecting key: ${key} in object: ${util.stringify(data[k])}`);
+ }
values.push(data[k][key]);
});
const averageFunc = arr => Math.round(arr.reduce((a, b) => a + b, 0) / arr.length);
@@ -267,17 +337,30 @@ module.exports = {
/**
* getPercentFromKeys
*
- * @param {Object} args - args object
- * @param {Object} [args.data] - data to process (always included)
- * @param {Object} [args.totalKey] - key containing total (max) value
- * @param {Object} [args.partialKey] - key containing partial value, such as free or used
- * @param {Object} [args.inverse] - inverse percentage
+ * @param {Object} args - args object
+ * @param {Object} [args.data] - data to process (always included)
+ * @param {Object} [args.totalKey] - key containing total (max) value
+ * @param {Object} [args.partialKey] - key containing partial value, such as free or used
+ * @param {Object} [args.inverse] - inverse percentage
+ * @param {Object} [args.nestedObjects] - whether or not to traverse sub-objects for keys
*
* @returns {Object} Returns calculated percentage
*/
getPercentFromKeys(args) {
const data = args.data;
+ const accumulateSubKeys = (arg, dataKeys) => dataKeys
+ .map(key => data[key][arg])
+ .reduce((acc, val) => acc + val);
+
+ if (args.nestedObjects && typeof data === 'object') {
+ // Get object keys before modifying the data object
+ const dataKeys = Object.keys(data);
+ [args.partialKey, args.totalKey].forEach((arg) => {
+ data[arg] = accumulateSubKeys(arg, dataKeys);
+ });
+ }
+
// this should result in a number between 0 and 100 (percentage)
let ret = Math.round(data[args.partialKey] / data[args.totalKey] * 100);
ret = args.inverse ? 100 - ret : ret;
@@ -354,6 +437,25 @@ module.exports = {
return newRules;
},
+ /**
+ * Convert map to array using provided options
+ *
+ * @param {Object} data - args object with a 'data' property containing the map to convert
+ *
+ * @returns {Array} Converted data
+ */
+ convertMapToArray(data) {
+ const ret = [];
+ data = data.data;
+
+ if (typeof data !== 'object') {
+ throw new Error(`convertMapToArray() object required: ${util.stringify(data)}`);
+ }
+
+ Object.keys(data).forEach(key => ret.push(data[key]));
+ return ret;
+ },
+
/**
* restructureGslbPool
*
@@ -415,6 +517,114 @@ module.exports = {
delete item.poolsCname;
});
+ return data;
+ },
+
+ /**
+ * Normalize MAC Address - upper case and etc.
+ *
+ * @param {Object} args - args object
+ * @param {Object} [args.data] - data to process (always included)
+ * @param {Array.} [args.properties] - list of properties to format
+ *
+ * @returns {Object} Returns formatted data
+ */
+ normalizeMACAddress(args) {
+ let data = args.data;
+ if (data) {
+ if (typeof args.properties === 'undefined') {
+ data = this._formatMACAddress(data);
+ } else {
+ const properties = args.properties;
+ const stack = [data];
+ let obj;
+
+ const forKey = (key) => {
+ const val = obj[key];
+ if (typeof val === 'object') {
+ if (val !== null) {
+ stack.push(val);
+ }
+ } else if (properties.indexOf(key) !== -1 && typeof val === 'string') {
+ obj[key] = this._formatMACAddress(val);
+ }
+ };
+
+ while (stack.length) {
+ obj = stack[0];
+ Object.keys(obj).forEach(forKey);
+ stack.shift();
+ }
+ }
+ }
+ return data;
+ },
+
+ /**
+ * Restructure Virtual Server Profiles
+ *
+ * @param {Object} args - args object
+ * @param {Object} [args.data] - data to process (always included)
+ *
+ * @returns {Object} Returns formatted data
+ */
+ restructureVirtualServerProfiles(args) {
+ /**
+ * Possible issues:
+ * profiles: {
+ * name: 'profiles', <---- should be removed
+ * items: { <---- should be removed
+ * name: 'items', <---- should be removed
+ * profile1: { <---- should be moved one level up
+ * name: 'profile1',
+ * .....
+ * }
+ * }
+ * }
+ */
+ const data = args.data;
+ if (data) {
+ Object.keys(data).forEach((vsName) => {
+ const vsObj = data[vsName];
+ if (vsObj.profiles) {
+ const profiles = vsObj.profiles;
+ delete profiles.name;
+
+ if (profiles.items) {
+ delete profiles.items.name;
+
+ Object.keys(profiles.items).forEach((profileName) => {
+ profiles[profileName] = profiles.items[profileName];
+ });
+ delete profiles.items;
+ }
+ }
+ });
+ }
+ return data;
+ },
+
+ /**
+ * Get value by key/path
+ *
+ * @param {Object} args - args object
+ * @param {Object} [args.data] - data to process (always included)
+ * @param {Array} [args.path] - path to fetch data from
+ *
+ * @returns {Object} Returns value that belongs to key/path
+ */
+ getValue(args) {
+ let data = args.data;
+ if (data && args.path) {
+ args.path.every((key) => {
+ data = data[key];
+ if (typeof data === 'undefined') {
+ data = 'missing data';
+ return false;
+ }
+ return true;
+ });
+ }
return data;
}
};
diff --git a/src/lib/paths.json b/src/lib/paths.json
index cb1ef2ec..bb783af2 100644
--- a/src/lib/paths.json
+++ b/src/lib/paths.json
@@ -1,209 +1,226 @@
{
"endpoints": [
{
- "endpoint": "/mgmt/tm/sys/global-settings"
+ "path": "/mgmt/tm/sys/global-settings"
},
{
- "endpoint": "/mgmt/tm/cm/device"
+ "path": "/mgmt/tm/cm/device"
},
{
- "endpoint": "/mgmt/tm/sys/hardware"
+ "path": "/mgmt/tm/sys/hardware"
},
{
- "endpoint": "/mgmt/tm/sys/version"
+ "path": "/mgmt/tm/sys/version"
},
{
- "endpoint": "/mgmt/tm/sys/ready"
+ "path": "/mgmt/tm/sys/ready"
},
{
- "endpoint": "/mgmt/tm/cm/sync-status"
+ "path": "/mgmt/tm/cm/sync-status"
},
{
- "endpoint": "/mgmt/tm/cm/failover-status"
+ "path": "/mgmt/tm/cm/failover-status"
},
{
- "endpoint": "/mgmt/tm/sys/clock"
+ "path": "/mgmt/tm/sys/clock"
},
{
- "endpoint": "/mgmt/tm/sys/host-info"
+ "path": "/mgmt/tm/sys/host-info"
},
{
- "endpoint": "/mgmt/tm/sys/memory"
+ "path": "/mgmt/tm/sys/memory"
},
{
- "endpoint": "/mgmt/tm/sys/management-ip"
+ "path": "/mgmt/tm/sys/management-ip"
},
{
"name": "provisioning",
- "endpoint": "/mgmt/tm/sys/provision"
+ "path": "/mgmt/tm/sys/provision"
},
{
"name": "networkInterfaces",
- "endpoint": "/mgmt/tm/net/interface/stats"
+ "path": "/mgmt/tm/net/interface/stats"
},
{
"name": "networkTunnels",
- "endpoint": "/mgmt/tm/net/tunnels/tunnel/stats"
+ "path": "/mgmt/tm/net/tunnels/tunnel/stats"
},
{
"name": "tmmInfo",
- "endpoint": "/mgmt/tm/sys/tmm-info"
+ "path": "/mgmt/tm/sys/tmm-info"
},
{
"name": "tmmTraffic",
- "endpoint": "/mgmt/tm/sys/tmm-traffic"
+ "path": "/mgmt/tm/sys/tmm-traffic"
},
{
"name": "aWideIps",
- "endpoint": "/mgmt/tm/gtm/wideip/a",
+ "path": "/mgmt/tm/gtm/wideip/a",
"includeStats": true
},
{
"name": "aaaaWideIps",
- "endpoint": "/mgmt/tm/gtm/wideip/aaaa",
+ "path": "/mgmt/tm/gtm/wideip/aaaa",
"includeStats": true
},
{
"name": "cnameWideIps",
- "endpoint": "/mgmt/tm/gtm/wideip/cname",
+ "path": "/mgmt/tm/gtm/wideip/cname",
"includeStats": true
},
{
"name": "mxWideIps",
- "endpoint": "/mgmt/tm/gtm/wideip/mx",
+ "path": "/mgmt/tm/gtm/wideip/mx",
"includeStats": true
},
{
"name": "naptrWideIps",
- "endpoint": "/mgmt/tm/gtm/wideip/naptr",
+ "path": "/mgmt/tm/gtm/wideip/naptr",
"includeStats": true
},
{
"name": "srvWideIps",
- "endpoint": "/mgmt/tm/gtm/wideip/srv",
+ "path": "/mgmt/tm/gtm/wideip/srv",
"includeStats": true
},
{
"name": "aPools",
- "endpoint": "/mgmt/tm/gtm/pool/a",
+ "path": "/mgmt/tm/gtm/pool/a",
"includeStats": true,
"expandReferences": { "membersReference": { "includeStats": true } }
},
{
"name": "aaaaPools",
- "endpoint": "/mgmt/tm/gtm/pool/aaaa",
+ "path": "/mgmt/tm/gtm/pool/aaaa",
"includeStats": true,
"expandReferences": { "membersReference": { "includeStats": true } }
},
{
"name": "cnamePools",
- "endpoint": "/mgmt/tm/gtm/pool/cname",
+ "path": "/mgmt/tm/gtm/pool/cname",
"includeStats": true,
"expandReferences": { "membersReference": { "includeStats": true } }
},
{
"name": "mxPools",
- "endpoint": "/mgmt/tm/gtm/pool/mx",
+ "path": "/mgmt/tm/gtm/pool/mx",
"includeStats": true,
"expandReferences": { "membersReference": { "includeStats": true } }
},
{
"name": "naptrPools",
- "endpoint": "/mgmt/tm/gtm/pool/naptr",
+ "path": "/mgmt/tm/gtm/pool/naptr",
"includeStats": true,
"expandReferences": { "membersReference": { "includeStats": true } }
},
{
"name": "srvPools",
- "endpoint": "/mgmt/tm/gtm/pool/srv",
+ "path": "/mgmt/tm/gtm/pool/srv",
"includeStats": true,
"expandReferences": { "membersReference": { "includeStats": true } }
},
{
"name": "virtualServers",
- "endpoint": "/mgmt/tm/ltm/virtual",
+ "path": "/mgmt/tm/ltm/virtual",
"includeStats": true,
- "endpointFields": [ "name", "fullPath", "selfLink", "appService", "ipProtocol", "mask", "pool" ]
+ "endpointFields": [ "name", "fullPath", "selfLink", "appService", "ipProtocol", "mask", "pool", "profilesReference" ],
+ "expandReferences": { "profilesReference": { "endpointSuffix": "?$select=name,fullPath" } }
},
{
"name": "pools",
- "endpoint": "/mgmt/tm/ltm/pool",
+ "path": "/mgmt/tm/ltm/pool",
"includeStats": true,
"expandReferences": { "membersReference": { "endpointSuffix": "/stats" } }
},
{
"name": "ltmPolicies",
- "endpoint": "/mgmt/tm/ltm/policy/stats"
+ "path": "/mgmt/tm/ltm/policy/stats"
},
{
"name": "sslCerts",
- "endpoint": "/mgmt/tm/sys/file/ssl-cert"
+ "path": "/mgmt/tm/sys/file/ssl-cert"
},
{
"name": "diskStorage",
- "endpoint": "/mgmt/tm/util/bash",
- "body": "{ \"command\": \"run\", \"utilCmdArgs\": \"-c \\\"/bin/df -P | /usr/bin/tr -s ' ' ','\\\"\" }"
+ "path": "/mgmt/tm/util/bash",
+ "ignoreCached": true,
+ "body": {
+ "command": "run",
+ "utilCmdArgs": "-c \"/bin/df -P | /usr/bin/tr -s ' ' ','\""
+ }
},
{
"name": "diskLatency",
- "endpoint": "/mgmt/tm/util/bash",
- "body": "{ \"command\": \"run\", \"utilCmdArgs\": \"-c \\\"/usr/bin/iostat -x -d | /usr/bin/tail -n +3 | /usr/bin/tr -s ' ' ','\\\"\" }"
+ "path": "/mgmt/tm/util/bash",
+ "ignoreCached": true,
+ "body": {
+ "command": "run",
+ "utilCmdArgs": "-c \"/usr/bin/iostat -x -d | /usr/bin/tail -n +3 | /usr/bin/tr -s ' ' ','\""
+ }
},
{
"name": "httpProfiles",
- "endpoint": "/mgmt/tm/ltm/profile/http/stats"
+ "path": "/mgmt/tm/ltm/profile/http/stats"
},
{
"name": "clientSslProfiles",
- "endpoint": "/mgmt/tm/ltm/profile/client-ssl/stats"
+ "path": "/mgmt/tm/ltm/profile/client-ssl/stats"
},
{
"name": "serverSslProfiles",
- "endpoint": "/mgmt/tm/ltm/profile/server-ssl/stats"
+ "path": "/mgmt/tm/ltm/profile/server-ssl/stats"
},
{
"name": "deviceGroups",
- "endpoint": "/mgmt/tm/cm/device-group",
+ "path": "/mgmt/tm/cm/device-group",
"includeStats": true
},
{
"name": "asmQuery",
- "endpoint": "/mgmt/tm/util/bash",
- "body": "{ \"command\": \"run\", \"utilCmdArgs\": \"-c \\\"/bin/mysql -uroot -p$(/bin/perl -MPassCrypt -nle 'print PassCrypt::decrypt_password($_)' /var/db/mysqlpw) PLC -B -e 'select CASE WHEN max(event_time) IS NOT NULL THEN \\\\\\\"Pending Policy Changes\\\\\\\" ELSE \\\\\\\"Policies Consistent\\\\\\\" END as asm_state, max(event_time) as last_asm_change from PL_CONFIG_LOG where event_type <> 2 and element_type <> 18 and event_time > (select max(from_date) as asm_last_changed from PL_POLICY_HISTORY)' | sed 's/\\t/,/'\\\"\"}"
+ "path": "/mgmt/tm/util/bash",
+ "ignoreCached": true,
+ "body": {
+ "command": "run",
+ "utilCmdArgs": "-c \"/bin/mysql -uroot -p$(/bin/perl -MPassCrypt -nle 'print PassCrypt::decrypt_password($_)' /var/db/mysqlpw) PLC -B -e 'select CASE WHEN max(event_time) IS NOT NULL THEN \\\"Pending Policy Changes\\\" ELSE \\\"Policies Consistent\\\" END as asm_state, max(event_time) as last_asm_change from PL_CONFIG_LOG where event_type <> 2 and element_type <> 18 and event_time > (select max(from_date) as asm_last_changed from PL_POLICY_HISTORY)' | sed 's/\\t/,/'\""
+ }
},
{
"name": "apmState",
- "endpoint": "/mgmt/tm/util/bash",
- "body": "{ \"command\": \"run\", \"utilCmdArgs\": \"-c \\\"/bin/unbuffer /usr/bin/guishell -c \\\\\\\"select case when max(config_sync_state) >= 0 then case when max(config_sync_state) > 0 then 'Pending Policy Changes' else 'Policies Consistent' end end from profile_access_misc_stat;\\\\\\\" | tr '\\n' ' ' | sed -r 's/.*\\\\|\\\\s*\\\\|.*\\\\| ([^|]*) \\\\|.*/apm_state\\\\n\\\\1/'\\\"\"}"
+ "path": "/mgmt/tm/util/bash",
+ "ignoreCached": true,
+ "body": {
+ "command": "run",
+ "utilCmdArgs": "-c \"/bin/unbuffer /usr/bin/guishell -c \\\"select max(config_sync_state) from profile_access_misc_stat;\\\" | tr '\\n' ' ' | sed -r 's/.*\\\\|\\\\s*\\\\|.*\\\\|\\\\s*([^|]*)\\\\s*\\\\|.*/apm_state\\n\\\\1/'\""
+ }
},
{
"name": "firewallCurrentState",
- "endpoint": "/mgmt/tm/security/firewall/current-state/stats"
+ "path": "/mgmt/tm/security/firewall/current-state/stats"
},
{
"name": "ltmConfigTime",
- "endpoint": "/mgmt/tm/sys/db/ltm.configtime"
+ "path": "/mgmt/tm/sys/db/ltm.configtime"
},
{
"name": "gtmConfigTime",
- "endpoint": "/mgmt/tm/sys/db/gtm.configtime"
+ "path": "/mgmt/tm/sys/db/gtm.configtime"
},
{
"name": "iRules",
- "endpoint": "/mgmt/tm/ltm/rule/stats"
+ "path": "/mgmt/tm/ltm/rule/stats"
},
{
"name": "tmctl",
- "endpoint": "/mgmt/tm/util/bash",
+ "path": "/mgmt/tm/util/bash",
"ignoreCached": true,
"body": {
"command": "run",
- "utilCmdArgs": "-c '/bin/tmctl $tmctlArgs'"
+ "utilCmdArgs": "-c 'tmctl $tmctlArgs'"
}
},
{
"name": "deviceInfo",
- "endpoint": "/mgmt/shared/identified-devices/config/device-info"
+ "path": "/mgmt/shared/identified-devices/config/device-info"
}
]
}
diff --git a/src/lib/persistentStorage.js b/src/lib/persistentStorage.js
index abb9e067..e50cec0b 100644
--- a/src/lib/persistentStorage.js
+++ b/src/lib/persistentStorage.js
@@ -8,7 +8,8 @@
'use strict';
-const logger = require('./logger.js');
+const logger = require('./logger');
+const util = require('./util');
/** @module persistentStorage */
@@ -26,7 +27,7 @@ const logger = require('./logger.js');
*
* @param {String} key - key to be searched in the storage
*
- * @returns {Promise.} Promise resolved with data
+ * @returns {Promise.} Promise resolved with copy data
*/
/**
* Set data to the specified key
@@ -38,7 +39,7 @@ const logger = require('./logger.js');
* @param {String} key - key to be used
* @param {} data - data to be set to the key
*
- * @returns {Promise} Promise resolved when data saved to the storage
+ * @returns {Promise} Promise resolved when copy data saved to the storage
*/
/**
* Remove data by the specified key
@@ -87,11 +88,20 @@ function PersistentStorageProxy(storage) {
/** @inheritdoc */
PersistentStorageProxy.prototype.get = function (key) {
- return this.storage.get(key);
+ return this.storage.get(key)
+ .then((value) => {
+ if (typeof value === 'object') {
+ value = util.deepCopy(value);
+ }
+ return Promise.resolve(value);
+ });
};
/** @inheritdoc */
PersistentStorageProxy.prototype.set = function (key, data) {
+ if (typeof data === 'object') {
+ data = util.deepCopy(data);
+ }
return this.storage.set(key, data);
};
@@ -186,7 +196,7 @@ RestStorage.prototype._load = function () {
loadPromise = loadPromise.then(() => this._unsafeLoad())
.then((state) => {
this._loadPromise = null;
- this._cache = this._validateLoadedState(state || {});
+ this._cache = this._validateLoadedState(state || this._getBaseState());
loadPromise.loadResults = this._cache._data_;
logger.debug('RestStorage.load: application state loaded');
})
diff --git a/src/lib/properties.json b/src/lib/properties.json
index 561cfc34..7f71343a 100644
--- a/src/lib/properties.json
+++ b/src/lib/properties.json
@@ -31,61 +31,63 @@
},
"version": {
"structure": { "parentKey": "system" },
- "key": "/mgmt/tm/sys/version::sys/version/0::Version"
+ "key": "deviceInfo::version"
},
"versionBuild": {
"structure": { "parentKey": "system" },
- "key": "/mgmt/tm/sys/version::sys/version/0::Build"
+ "key": "deviceInfo::build"
},
"location": {
"structure": { "parentKey": "system" },
- "key": "/mgmt/tm/cm/device::items::{{HOSTNAME}}::location",
+ "key": "/mgmt/tm/cm/device::items",
"normalization": [
{
- "convertArrayToMap": { "keyName": "name" }
+ "runFunctions": [{ "name": "normalizeMACAddress", "args": { "properties": ["baseMac"] } } ]
+ },
+ {
+ "convertArrayToMap": { "keyName": "baseMac" }, "useCurrentData": true, "keepKey": true
+ },
+ {
+ "runFunctions": [{ "name": "getValue", "args": { "path": ["{{ BASE_MAC_ADDR }}", "location" ] } } ]
}
]
},
"description": {
"structure": { "parentKey": "system" },
- "key": "/mgmt/tm/cm/device::items::{{HOSTNAME}}::description",
+ "key": "/mgmt/tm/cm/device::items",
"normalization": [
{
- "convertArrayToMap": { "keyName": "name" }
+ "runFunctions": [{ "name": "normalizeMACAddress", "args": { "properties": ["baseMac"] } } ]
+ },
+ {
+ "convertArrayToMap": { "keyName": "baseMac" }, "useCurrentData": true, "keepKey": true
+ },
+ {
+ "runFunctions": [{ "name": "getValue", "args": { "path": ["{{ BASE_MAC_ADDR }}", "description" ] } } ]
}
]
},
"marketingName": {
"structure": { "parentKey": "system" },
- "key": "/mgmt/tm/cm/device::items::{{HOSTNAME}}::marketingName",
- "normalization": [
- {
- "convertArrayToMap": { "keyName": "name" }
- }
- ]
+ "key": "deviceInfo::platformMarketingName"
},
"platformId": {
"structure": { "parentKey": "system" },
- "key": "/mgmt/tm/cm/device::items::{{HOSTNAME}}::platformId",
- "normalization": [
- {
- "convertArrayToMap": { "keyName": "name" }
- }
- ]
+ "key": "deviceInfo::platform"
},
"chassisId": {
"structure": { "parentKey": "system" },
- "key": "/mgmt/tm/cm/device::items::{{HOSTNAME}}::chassisId",
+ "key": "deviceInfo::chassisSerialNumber"
+ },
+ "baseMac": {
+ "structure": { "parentKey": "system" },
+ "key": "deviceInfo::baseMac",
"normalization": [
{
- "convertArrayToMap": { "keyName": "name" }
+ "runFunctions": [{ "name": "normalizeMACAddress" }]
}
]
},
- "baseMac": {
- "structure": { "parentKey": "system" },
- "key": "/mgmt/tm/sys/hardware::sys/hardware/platform::sys/hardware/platform/0::baseMac"
- },
"callBackUrl": {
"structure": { "parentKey": "system" },
"key": "/mgmt/tm/sys/management-ip::items",
@@ -164,20 +166,25 @@
},
"cpu": {
"structure": { "parentKey": "system" },
- "key": "/mgmt/tm/sys/host-info::sys/host-info/0::sys/hostInfo/0/cpuInfo",
+ "key": "/mgmt/tm/sys/host-info",
"normalization": [
{
- "runFunctions": [{ "name": "getAverage", "args": { "keyWithValue": "oneMinAvgSystem" } }]
+ "runFunctions": [
+ { "name": "restructureHostCpuInfo", "args": { "keyPattern": "^sys/host-info/\\d+::^sys/hostInfo/\\d+/cpuInfo" } },
+ { "name": "getAverage", "args": { "keyWithValue": "oneMinAvgSystem" } }
+ ]
}
],
"comment": "also oneMinAvgUser, need to determine how that should be factored in"
},
"memory": {
"structure": { "parentKey": "system" },
- "key": "/mgmt/tm/sys/memory::sys/memory/memory-host::sys/memory/memory-host/0",
+ "key": "/mgmt/tm/sys/memory::sys/memory/memory-host",
"normalization": [
{
- "runFunctions": [{ "name": "getPercentFromKeys", "args": { "totalKey": "memoryTotal", "partialKey": "memoryUsed" } }]
+ "runFunctions": [
+ { "name": "getPercentFromKeys", "args": { "totalKey": "memoryTotal", "partialKey": "memoryUsed", "nestedObjects": true } }
+ ]
}
]
},
@@ -192,10 +199,12 @@
},
"tmmMemory": {
"structure": { "parentKey": "system" },
- "key": "/mgmt/tm/sys/memory::sys/memory/memory-host::sys/memory/memory-host/0",
+ "key": "/mgmt/tm/sys/memory::sys/memory/memory-host",
"normalization": [
{
- "runFunctions": [{ "name": "getPercentFromKeys", "args": { "totalKey": "tmmMemoryTotal", "partialKey": "tmmMemoryUsed" } }]
+ "runFunctions": [
+ { "name": "getPercentFromKeys", "args": { "totalKey": "tmmMemoryTotal", "partialKey": "tmmMemoryUsed", "nestedObjects": true } }
+ ]
}
]
},
@@ -260,6 +269,124 @@
}
]
},
+ "asmState": {
+ "structure": { "parentKey": "system" },
+ "if": {
+ "isModuleProvisioned": "asm"
+ },
+ "then": {
+ "key": "asmQuery::commandResult",
+ "normalization": [
+ {
+ "runFunctions": [
+ { "name": "formatAsJson", "args": { "type": "csv", "mapKey": "asm_state" } },
+ { "name": "getFirstKey" }
+ ]
+ }
+ ]
+ },
+ "else": {
+ "disabled": true
+ }
+ },
+ "lastAsmChange": {
+ "structure": { "parentKey": "system" },
+ "if": {
+ "isModuleProvisioned": "asm"
+ },
+ "then": {
+ "key": "asmQuery::commandResult",
+ "normalization": [
+ {
+ "runFunctions": [
+ { "name": "formatAsJson", "args": { "type": "csv", "mapKey": "last_asm_change", "renameKeys": { "patterns": { "NULL": { "constant": "" } } } } },
+ { "name": "getFirstKey" }
+ ]
+ }
+ ]
+ },
+ "else": {
+ "disabled": true
+ }
+ },
+ "apmState": {
+ "structure": { "parentKey": "system" },
+ "if": {
+ "isModuleProvisioned": "apm"
+ },
+ "then": {
+ "key": "apmState::commandResult",
+ "normalization": [
+ {
+ "runFunctions": [
+ { "name": "formatAsJson", "args": { "type": "csv", "mapKey": "apm_state", "renameKeys": { "patterns": { "NULL": { "constant": "" } } } } },
+ { "name": "getFirstKey" }
+ ]
+ }
+ ]
+ },
+ "else": {
+ "disabled": true
+ }
+ },
+ "afmState": {
+ "structure": { "parentKey": "system" },
+ "if": {
+ "isModuleProvisioned": "afm"
+ },
+ "then": {
+ "key": "firewallCurrentState::pccdStatus",
+ "normalization": [
+ {
+ "includeFirstEntry": { "pattern": "/stats" }
+ }
+ ]
+ },
+ "else": {
+ "disabled": true
+ }
+ },
+ "lastAfmDeploy": {
+ "structure": { "parentKey": "system" },
+ "if": {
+ "isModuleProvisioned": "afm"
+ },
+ "then": {
+ "key": "firewallCurrentState::ruleDeployEndTimeFmt",
+ "normalization": [
+ {
+ "includeFirstEntry": { "pattern": "/stats" }
+ }
+ ]
+ },
+ "else": {
+ "disabled": true
+ }
+ },
+ "ltmConfigTime": {
+ "structure": { "parentKey": "system" },
+ "if": {
+ "isModuleProvisioned": "ltm"
+ },
+ "then": {
+ "key": "ltmConfigTime::value"
+ },
+ "else": {
+ "disabled": true
+ }
+ },
+ "gtmConfigTime": {
+ "structure": { "parentKey": "system" },
+ "if": {
+ "isModuleProvisioned": "gtm"
+ },
+ "then": {
+ "key": "gtmConfigTime::value"
+ },
+ "else": {
+ "disabled": true
+ }
+ },
"aWideIps": {
"if": {
"isModuleProvisioned": "gtm"
@@ -609,10 +736,15 @@
"filterKeys": { "exclude": [ "fullPath", "generation", "appServiceReference", "tmName", "status.statusReason", "cmpEnabled", "cmpEnableMode", "csMaxConnDur", "csMeanConnDur", "csMinConnDur", "totRequests", "oneMinAvgUsageRatio", "clientside.pktsIn", "clientside.pktsOut", "clientside.evictedConns", "clientside.slowKilled", "clientside.maxConns", "clientside.totConns", "ephemeral.bitsIn", "ephemeral.bitsOut", "ephemeral.curConns", "ephemeral.evictedConns", "ephemeral.maxConns", "ephemeral.pktsIn", "ephemeral.pktsOut", "ephemeral.slowKilled", "ephemeral.totConns", "fiveSecAvgUsageRatio", "fiveMinAvgUsageRatio", "syncookieStatus", "syncookie.accepts", "syncookie.hwAccepts", "syncookie.hwSyncookies", "syncookie.rejects", "syncookie.hwsyncookieInstance", "syncookie.swsyncookieInstance", "syncookie.syncacheCurr", "syncookie.syncacheOver", "syncookie.syncookies", "syncookie.syncacheOver", "poolReference" ] }
},
{
- "renameKeys": { "patterns": { "name/": { "pattern": "name\/(.*)", "group": 1 }, "ltm/virtual": { "pattern": "virtual\/(.*)\/", "group": 1 } } }
+ "renameKeys": { "patterns": { "name/": { "pattern": "name\/(.*)", "group": 1 }, "ltm/virtual": { "pattern": "virtual\/(.*)\/", "group": 1 }, "profilesReference": "profiles" } }
},
{
"addKeysByTag": true
+ },
+ {
+ "runFunctions": [
+ { "name": "restructureVirtualServerProfiles" }
+ ]
}
]
},
@@ -740,124 +872,6 @@
}
]
},
- "asmState": {
- "structure": { "parentKey": "system" },
- "if": {
- "isModuleProvisioned": "asm"
- },
- "then": {
- "key": "asmQuery::commandResult",
- "normalization": [
- {
- "runFunctions": [
- { "name": "formatAsJson", "args": { "type": "csv", "mapKey": "asm_state" } },
- { "name": "getFirstKey" }
- ]
- }
- ]
- },
- "else": {
- "disabled": true
- }
- },
- "lastAsmChange": {
- "structure": { "parentKey": "system" },
- "if": {
- "isModuleProvisioned": "asm"
- },
- "then": {
- "key": "asmQuery::commandResult",
- "normalization": [
- {
- "runFunctions": [
- { "name": "formatAsJson", "args": { "type": "csv", "mapKey": "last_asm_change", "renameKeys": { "patterns": { "NULL": { "constant": "" } } } } },
- { "name": "getFirstKey" }
- ]
- }
- ]
- },
- "else": {
- "disabled": true
- }
- },
- "apmState": {
- "structure": { "parentKey": "system" },
- "if": {
- "isModuleProvisioned": "apm"
- },
- "then": {
- "key": "apmState::commandResult",
- "normalization": [
- {
- "runFunctions": [
- { "name": "formatAsJson", "args": { "type": "csv", "mapKey": "apm_state", "renameKeys": { "patterns": { "NULL": { "constant": "" } } } } },
- { "name": "getFirstKey" }
- ]
- }
- ]
- },
- "else": {
- "disabled": true
- }
- },
- "afmState": {
- "structure": { "parentKey": "system" },
- "if": {
- "isModuleProvisioned": "afm"
- },
- "then": {
- "key": "firewallCurrentState::pccdStatus",
- "normalization": [
- {
- "includeFirstEntry": { "pattern": "/stats" }
- }
- ]
- },
- "else": {
- "disabled": true
- }
- },
- "lastAfmDeploy": {
- "structure": { "parentKey": "system" },
- "if": {
- "isModuleProvisioned": "afm"
- },
- "then": {
- "key": "firewallCurrentState::ruleDeployEndTimeFmt",
- "normalization": [
- {
- "includeFirstEntry": { "pattern": "/stats" }
- }
- ]
- },
- "else": {
- "disabled": true
- }
- },
- "ltmConfigTime": {
- "structure": { "parentKey": "system" },
- "if": {
- "isModuleProvisioned": "ltm"
- },
- "then": {
- "key": "ltmConfigTime::value"
- },
- "else": {
- "disabled": true
- }
- },
- "gtmConfigTime": {
- "structure": { "parentKey": "system" },
- "if": {
- "isModuleProvisioned": "gtm"
- },
- "then": {
- "key": "gtmConfigTime::value"
- },
- "else": {
- "disabled": true
- }
- },
"iRules": {
"key": "iRules",
"normalization": [
@@ -886,7 +900,13 @@
"keyArgs": { "replaceStrings": { "\\$tmctlArgs": "-c asm_cpu_util_stats" } },
"normalization": [
{
- "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv" } }]
+ "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv", "mapKey": "vs_name" } }]
+ },
+ {
+ "addKeysByTag": { "tags": { "tenant": "`T`", "application": "`A`" } }
+ },
+ {
+ "runFunctions": [{ "name": "convertMapToArray" }]
}
]
},
@@ -936,7 +956,13 @@
"keyArgs": { "replaceStrings": { "\\$tmctlArgs": "-c dos_stat" } },
"normalization": [
{
- "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv" } }]
+ "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv", "mapKey": "vs_name" } }]
+ },
+ {
+ "addKeysByTag": { "tags": { "tenant": "`T`", "application": "`A`" } }
+ },
+ {
+ "runFunctions": [{ "name": "convertMapToArray" }]
}
]
},
@@ -966,7 +992,13 @@
"keyArgs": { "replaceStrings": { "\\$tmctlArgs": "-c flow_eviction_policy_stat" } },
"normalization": [
{
- "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv" } }]
+ "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv", "mapKey": "context_name" } }]
+ },
+ {
+ "addKeysByTag": { "tags": { "tenant": "`T`", "application": "`A`" } }
+ },
+ {
+ "runFunctions": [{ "name": "convertMapToArray" }]
}
]
},
@@ -1126,7 +1158,13 @@
"keyArgs": { "replaceStrings": { "\\$tmctlArgs": "-c pool_member_stat" } },
"normalization": [
{
- "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv" } }]
+ "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv", "mapKey": "pool_name" } }]
+ },
+ {
+ "addKeysByTag": { "tags": { "tenant": "`T`", "application": "`A`" } }
+ },
+ {
+ "runFunctions": [{ "name": "convertMapToArray" }]
}
]
},
@@ -1156,7 +1194,13 @@
"keyArgs": { "replaceStrings": { "\\$tmctlArgs": "-c profile_bigproto_stat" } },
"normalization": [
{
- "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv" } }]
+ "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv", "mapKey": "vs_name" } }]
+ },
+ {
+ "addKeysByTag": { "tags": { "tenant": "`T`", "application": "`A`" } }
+ },
+ {
+ "runFunctions": [{ "name": "convertMapToArray" }]
}
]
},
@@ -1166,7 +1210,13 @@
"keyArgs": { "replaceStrings": { "\\$tmctlArgs": "-c profile_clientssl_stat" } },
"normalization": [
{
- "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv" } }]
+ "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv", "mapKey": "vs_name" } }]
+ },
+ {
+ "addKeysByTag": { "tags": { "tenant": "`T`", "application": "`A`" } }
+ },
+ {
+ "runFunctions": [{ "name": "convertMapToArray" }]
}
]
},
@@ -1176,7 +1226,13 @@
"keyArgs": { "replaceStrings": { "\\$tmctlArgs": "-c profile_connpool_stat" } },
"normalization": [
{
- "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv" } }]
+ "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv", "mapKey": "vs_name" } }]
+ },
+ {
+ "addKeysByTag": { "tags": { "tenant": "`T`", "application": "`A`" } }
+ },
+ {
+ "runFunctions": [{ "name": "convertMapToArray" }]
}
]
},
@@ -1186,7 +1242,13 @@
"keyArgs": { "replaceStrings": { "\\$tmctlArgs": "-c profile_dns_stat" } },
"normalization": [
{
- "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv" } }]
+ "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv", "mapKey": "vs_name" } }]
+ },
+ {
+ "addKeysByTag": { "tags": { "tenant": "`T`", "application": "`A`" } }
+ },
+ {
+ "runFunctions": [{ "name": "convertMapToArray" }]
}
]
},
@@ -1196,7 +1258,13 @@
"keyArgs": { "replaceStrings": { "\\$tmctlArgs": "-c profile_ftp_stat" } },
"normalization": [
{
- "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv" } }]
+ "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv", "mapKey": "vs_name" } }]
+ },
+ {
+ "addKeysByTag": { "tags": { "tenant": "`T`", "application": "`A`" } }
+ },
+ {
+ "runFunctions": [{ "name": "convertMapToArray" }]
}
]
},
@@ -1206,7 +1274,13 @@
"keyArgs": { "replaceStrings": { "\\$tmctlArgs": "-c profile_http_stat" } },
"normalization": [
{
- "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv" } }]
+ "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv", "mapKey": "vs_name" } }]
+ },
+ {
+ "addKeysByTag": { "tags": { "tenant": "`T`", "application": "`A`" } }
+ },
+ {
+ "runFunctions": [{ "name": "convertMapToArray" }]
}
]
},
@@ -1216,7 +1290,13 @@
"keyArgs": { "replaceStrings": { "\\$tmctlArgs": "-c profile_httpcompression_stat" } },
"normalization": [
{
- "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv" } }]
+ "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv", "mapKey": "vs_name" } }]
+ },
+ {
+ "addKeysByTag": { "tags": { "tenant": "`T`", "application": "`A`" } }
+ },
+ {
+ "runFunctions": [{ "name": "convertMapToArray" }]
}
]
},
@@ -1226,7 +1306,13 @@
"keyArgs": { "replaceStrings": { "\\$tmctlArgs": "-c profile_serverssl_stat" } },
"normalization": [
{
- "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv" } }]
+ "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv", "mapKey": "vs_name" } }]
+ },
+ {
+ "addKeysByTag": { "tags": { "tenant": "`T`", "application": "`A`" } }
+ },
+ {
+ "runFunctions": [{ "name": "convertMapToArray" }]
}
]
},
@@ -1236,7 +1322,13 @@
"keyArgs": { "replaceStrings": { "\\$tmctlArgs": "-c profile_tcp_stat" } },
"normalization": [
{
- "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv" } }]
+ "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv", "mapKey": "vs_name" } }]
+ },
+ {
+ "addKeysByTag": { "tags": { "tenant": "`T`", "application": "`A`" } }
+ },
+ {
+ "runFunctions": [{ "name": "convertMapToArray" }]
}
]
},
@@ -1246,7 +1338,13 @@
"keyArgs": { "replaceStrings": { "\\$tmctlArgs": "-c profile_udp_stat" } },
"normalization": [
{
- "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv" } }]
+ "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv", "mapKey": "vs_name" } }]
+ },
+ {
+ "addKeysByTag": { "tags": { "tenant": "`T`", "application": "`A`" } }
+ },
+ {
+ "runFunctions": [{ "name": "convertMapToArray" }]
}
]
},
@@ -1256,7 +1354,13 @@
"keyArgs": { "replaceStrings": { "\\$tmctlArgs": "-c profile_webacceleration_stat" } },
"normalization": [
{
- "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv" } }]
+ "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv", "mapKey": "vs_name" } }]
+ },
+ {
+ "addKeysByTag": { "tags": { "tenant": "`T`", "application": "`A`" } }
+ },
+ {
+ "runFunctions": [{ "name": "convertMapToArray" }]
}
]
},
@@ -1326,7 +1430,7 @@
"keyArgs": { "replaceStrings": { "\\$tmctlArgs": "-c virtual_server_conn_stat" } },
"normalization": [
{
- "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv" } }]
+ "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv", "$comment": "can't dump table on 12.0+" } }]
}
]
},
@@ -1336,7 +1440,13 @@
"keyArgs": { "replaceStrings": { "\\$tmctlArgs": "-c virtual_server_cpu_stat" } },
"normalization": [
{
- "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv" } }]
+ "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv", "mapKey": "name" } }]
+ },
+ {
+ "addKeysByTag": { "tags": { "tenant": "`T`", "application": "`A`" } }
+ },
+ {
+ "runFunctions": [{ "name": "convertMapToArray" }]
}
]
},
@@ -1346,7 +1456,13 @@
"keyArgs": { "replaceStrings": { "\\$tmctlArgs": "-c virtual_server_stat" } },
"normalization": [
{
- "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv" } }]
+ "runFunctions": [{ "name": "formatAsJson", "args": { "type": "csv", "mapKey": "name" } }]
+ },
+ {
+ "addKeysByTag": { "tags": { "tenant": "`T`", "application": "`A`" } }
+ },
+ {
+ "runFunctions": [{ "name": "convertMapToArray" }]
}
]
}
@@ -1357,7 +1473,10 @@
"AVR": { "keys": [ { "required": [ "EOCTimestamp" ], "optional": [ "AggrInterval", "Microtimestamp", "STAT_SRC", "Entity", "errdefs_msgno" ] } ] },
"ASM": { "keys": [ { "required": [ "policy_name" ], "optional": [ "policy_apply_date", "request_status" ] } ] },
"APM": { "keys": [ { "required": [ "Access_Profile" ] } ] },
- "AFM": { "keys": [ { "required": [ "acl_policy_name" ], "optional": [ "acl_policy_type", "acl_rule_name" ] } ] },
+ "AFM": { "keys": [
+ { "required": [ "acl_policy_name" ], "optional": [ "acl_policy_type", "acl_rule_name" ] },
+ { "required": [ "dos_attack_id" ], "optional": [ "flow_id", "action", "errdefs_msg_name" ] }
+ ]},
"CGNAT": { "keys": [ { "required": [ "lsn_event" ], "optional": [ "lsn_client", "lsn_pb", "start" ] } ] }
}
},
@@ -1379,13 +1498,20 @@
},
"context": {
"HOSTNAME": {
- "key": "/mgmt/tm/sys/global-settings::hostname"
+ "key": "deviceInfo::hostname"
+ },
+ "BASE_MAC_ADDR": {
+ "key": "deviceInfo::baseMac",
+ "normalization": [
+ {
+ "runFunctions": [{ "name": "normalizeMACAddress" }]
+ }
+ ]
},
"deviceVersion": {
- "key": "/mgmt/tm/sys/version::sys/version/0::Version"
+ "key": "deviceInfo::version"
},
"provisioning": {
- "structure": { "parentKey": "system" },
"key": "provisioning::items",
"normalization": [
{
diff --git a/src/lib/systemPoller.js b/src/lib/systemPoller.js
index df940db8..8f3b5246 100644
--- a/src/lib/systemPoller.js
+++ b/src/lib/systemPoller.js
@@ -8,20 +8,215 @@
'use strict';
-const logger = require('./logger.js'); // eslint-disable-line no-unused-vars
-const constants = require('./constants.js');
-const util = require('./util.js');
-const deviceUtil = require('./deviceUtil.js');
-const configWorker = require('./config.js');
-const SystemStats = require('./systemStats.js');
-const dataPipeline = require('./dataPipeline.js');
-
-const SYSTEM_CLASS_NAME = constants.SYSTEM_CLASS_NAME;
-const SYSTEM_POLLER_CLASS_NAME = constants.SYSTEM_POLLER_CLASS_NAME;
-const pollerIDs = {};
+const constants = require('./constants');
+const configWorker = require('./config');
+const dataPipeline = require('./dataPipeline');
+const deviceUtil = require('./deviceUtil');
+const logger = require('./logger'); // eslint-disable-line no-unused-vars
+const normalizeConfig = require('./normalizeConfig');
+const SystemStats = require('./systemStats');
+const util = require('./util');
/** @module systemPoller */
+const CONFIG_CLASSES = constants.CONFIG_CLASSES;
+// use SYSTEM_POLLER_CLASS_NAME to keep compatibility with previous versions
+// but it is possible use SYSTEM_CLASS_NAME instead too
+const TRACER_CLASS_NAME = CONFIG_CLASSES.SYSTEM_POLLER_CLASS_NAME;
+// key - poller name, value - timer ID
+const POLLER_TIMERS = {};
+
+function getPollerTimers() {
+ return POLLER_TIMERS;
+}
+
+function getTelemetryObjects(originalConfig, className) {
+ return originalConfig[className] || {};
+}
+
+function getTelemetrySystems(originalConfig) {
+ return getTelemetryObjects(originalConfig, CONFIG_CLASSES.SYSTEM_CLASS_NAME);
+}
+
+function getTelemetrySystemPollers(originalConfig) {
+ return getTelemetryObjects(originalConfig, CONFIG_CLASSES.SYSTEM_POLLER_CLASS_NAME);
+}
+
+function getTelemetryConsumers(originalConfig) {
+ return getTelemetryObjects(originalConfig, CONFIG_CLASSES.CONSUMER_CLASS_NAME);
+}
+
+function createCustomConfig(originalConfig, sysOrPollerName, pollerName) {
+ // originalConfig is not normalized yet
+ let systems = getTelemetrySystems(originalConfig);
+ let pollers = getTelemetrySystemPollers(originalConfig);
+ let system;
+ let poller;
+
+ if (sysOrPollerName && pollerName) {
+ system = systems[sysOrPollerName];
+ poller = pollers[pollerName];
+ } else {
+ // each object has unique name across the entire declaration.
+ // so, one of them will be 'undefined'
+ system = systems[sysOrPollerName];
+ poller = pollers[sysOrPollerName];
+ }
+
+ const systemFound = !util.isObjectEmpty(system);
+ const pollerFound = !util.isObjectEmpty(poller);
+ // check for errors at first
+ if (!systemFound || !pollerFound) {
+ if (pollerName) {
+ // sysOrPollerName and pollerName both passed to the function
+ if (!systemFound) {
+ throw new Error(`System with name '${sysOrPollerName}' doesn't exist`);
+ }
+ if (!pollerFound) {
+ throw new Error(`System Poller with name '${pollerName}' doesn't exist`);
+ }
+ }
+ if (!(systemFound || pollerFound)) {
+ throw new Error(`System or System Poller with name '${sysOrPollerName}' doesn't exist`);
+ }
+ if (systemFound && util.isObjectEmpty(system.systemPoller)) {
+ throw new Error(`System with name '${sysOrPollerName}' has no System Poller configured`);
+ }
+ }
+ // error check passed and now we have valid objects to continue with
+ if (systemFound && pollerFound) {
+ systems = { [sysOrPollerName]: system };
+ pollers = { [pollerName]: poller };
+ system.systemPoller = pollerName;
+ } else if (pollerFound) {
+ systems = {};
+ pollers = { [sysOrPollerName]: poller };
+ } else {
+ const newPollers = {};
+ systems = { [sysOrPollerName]: system };
+
+ system.systemPoller = Array.isArray(system.systemPoller) ? system.systemPoller
+ : [system.systemPoller];
+
+ system.systemPoller.forEach((pollerVal) => {
+ if (typeof pollerVal === 'string') {
+ newPollers[pollerVal] = pollers[pollerVal];
+ }
+ });
+ pollers = newPollers;
+ }
+ originalConfig[CONFIG_CLASSES.SYSTEM_CLASS_NAME] = systems;
+ originalConfig[CONFIG_CLASSES.SYSTEM_POLLER_CLASS_NAME] = pollers;
+ return originalConfig;
+}
+
+/**
+ * Compute trace's value from System and System Poller config
+ *
+ * @param {Boolean|String} [systemTrace] - system's trace config
+ * @param {Boolean|String} [pollerTrace] - poller's trace config
+ *
+ * @returns {Boolean|String} trace's value
+ */
+function getTraceValue(systemTrace, pollerTrace) {
+ if (typeof systemTrace === 'undefined' && typeof pollerTrace === 'undefined') {
+ pollerTrace = false;
+ } else {
+ // we know that one of the values is defined (or both)
+ // set default value to true to do not block tracer usage
+ systemTrace = typeof systemTrace === 'undefined' ? true : systemTrace;
+ pollerTrace = typeof pollerTrace === 'undefined' ? true : pollerTrace;
+ if (typeof pollerTrace === 'string') {
+ // preserve poller's value
+ pollerTrace = systemTrace && pollerTrace;
+ } else if (pollerTrace === true) {
+ // preserve system's value
+ pollerTrace = systemTrace;
+ }
+ }
+ return pollerTrace;
+}
+
+function createPollerConfig(systemConfig, pollerConfig, fetchTMStats) {
+ return {
+ name: `${systemConfig.name}::${pollerConfig.name}`,
+ enable: Boolean(systemConfig.enable && pollerConfig.enable),
+ trace: module.exports.getTraceValue(systemConfig.trace, pollerConfig.trace),
+ interval: pollerConfig.interval,
+ connection: {
+ host: systemConfig.host,
+ port: systemConfig.port,
+ protocol: systemConfig.protocol,
+ allowSelfSignedCert: systemConfig.allowSelfSignedCert
+ },
+ credentials: {
+ username: systemConfig.username,
+ passphrase: systemConfig.passphrase
+ },
+ dataOpts: {
+ tags: pollerConfig.tag,
+ actions: pollerConfig.actions,
+ noTMStats: !fetchTMStats
+ },
+ endpointList: pollerConfig.endpointList
+ };
+}
+
+function getEnabledPollerConfigs(systemObj, fetchTMStats, includeDisabled) {
+ const pollers = [];
+ if (systemObj.enable || includeDisabled) {
+ systemObj.systemPollers.forEach((pollerConfig) => {
+ if (pollerConfig.enable || includeDisabled) {
+ const newPollerConfig = module.exports.createPollerConfig(systemObj, pollerConfig, fetchTMStats);
+ pollers.push(newPollerConfig);
+ }
+ });
+ }
+ return pollers;
+}
+
+function hasSplunkLegacy(originalConfig) {
+ const consumers = getTelemetryConsumers(originalConfig);
+ return Object.keys(consumers).some(consumerKey => consumers[consumerKey].type === 'Splunk'
+ && consumers[consumerKey].format === 'legacy');
+}
+
+function applyConfig(originalConfig) {
+ const systems = getTelemetrySystems(originalConfig);
+ const fetchTMStats = hasSplunkLegacy(originalConfig);
+ const newPollerIDs = [];
+ const currPollerIDs = module.exports.getPollerTimers();
+
+ Object.keys(systems).forEach((systemName) => {
+ module.exports.getEnabledPollerConfigs(systems[systemName], fetchTMStats).forEach((pollerConfig) => {
+ newPollerIDs.push(pollerConfig.name);
+ pollerConfig.tracer = util.tracer.createFromConfig(
+ TRACER_CLASS_NAME, pollerConfig.name, pollerConfig
+ );
+ const baseMsg = `system poller ${pollerConfig.name}. Interval = ${pollerConfig.interval} sec.`;
+ if (currPollerIDs[pollerConfig.name]) {
+ logger.info(`Updating ${baseMsg}`);
+ currPollerIDs[pollerConfig.name] = util.update(
+ currPollerIDs[pollerConfig.name], module.exports.safeProcess, pollerConfig, pollerConfig.interval
+ );
+ } else {
+ logger.info(`Starting ${baseMsg}`);
+ currPollerIDs[pollerConfig.name] = util.start(
+ module.exports.safeProcess, pollerConfig, pollerConfig.interval
+ );
+ }
+ });
+ });
+
+ Object.keys(currPollerIDs).forEach((key) => {
+ if (newPollerIDs.indexOf(key) === -1) {
+ logger.info(`Disabling system poller ${key}`);
+ util.stop(currPollerIDs[key]);
+ delete currPollerIDs[key];
+ }
+ });
+}
+
/**
* Process system(s) stats
*
@@ -34,33 +229,34 @@ const pollerIDs = {};
* @returns {Promise} Promise which is resolved with data sent
*/
function process() {
- const args = arguments[0];
+ const config = arguments[0];
const options = arguments.length > 1 ? arguments[1] : {};
-
- const config = args.config;
- const tracer = args.tracer;
+ const tracer = config.tracer;
const startTimestamp = new Date().toISOString();
logger.debug('System poller cycle started');
- const systemStats = new SystemStats(config.host, config.options);
+ const systemStats = new SystemStats(config);
return systemStats.collect()
.then((normalizedData) => {
- const endTimeStamp = new Date().toISOString();
// inject service data
const telemetryServiceInfo = {
- pollingInterval: args.interval,
+ pollingInterval: config.interval,
cycleStart: startTimestamp,
- cycleEnd: endTimeStamp
+ cycleEnd: (new Date()).toISOString()
};
normalizedData.telemetryServiceInfo = telemetryServiceInfo;
normalizedData.telemetryEventCategory = constants.EVENT_TYPES.SYSTEM_POLLER;
// end inject service data
- const dataCtx = { data: normalizedData, type: constants.EVENT_TYPES.SYSTEM_POLLER };
+ const dataCtx = {
+ data: normalizedData,
+ type: constants.EVENT_TYPES.SYSTEM_POLLER,
+ isCustom: systemStats.isCustom
+ };
return dataPipeline.process(dataCtx, {
noConsumers: options.requestFromUser,
tracer,
- actions: config.options.actions,
+ actions: config.dataOpts.actions,
deviceContext: systemStats.contextData
});
})
@@ -82,16 +278,24 @@ function process() {
* @returns {Promise.